file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
12.1k
| suffix
large_stringlengths 0
12k
| middle
large_stringlengths 0
7.51k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
gru_model.py | "]
unique_intent = list(set(intent))
sentences = list(df["Sentence"])
return (intent, unique_intent, sentences)
intent, unique_intent, sentences = load_dataset("Dataset.csv")
intent
sentences
print(sentences[:10])
nltk.download("stopwords")
nltk.download("punkt")
#define stemmer
stemmer = LancasterStemmer()
"""# 3. Data Cleaning"""
def cleaning(sentences):
words = []
for s in sentences:
clean = re.sub(r'[^ a-z A-Z 0-9]', " ", s)
w = word_tokenize(clean)
#stemming
words.append([i.lower() for i in w])
return words
cleaned_words = cleaning(sentences)
print(len(cleaned_words))
print(cleaned_words[:2])
"""### 3.1 Keras Tokenizer"""
def create_tokenizer(words, filters = '!"#$%&()*+,-./:;<=>?@[\]^_`{|}~'):
token = Tokenizer(filters = filters)
token.fit_on_texts(words)
return token
def max_length(words):
return(len(max(words, key = len)))
word_tokenizer = create_tokenizer(cleaned_words)
vocab_size = len(word_tokenizer.word_index) + 1
max_length = max_length(cleaned_words)
print("Vocab Size = %d and Maximum length = %d" % (vocab_size, max_length))
"""### 3.2 One Hot Encoding for Model Fed"""
def encoding_doc(token, words):
return(token.texts_to_sequences(words))
encoded_doc = encoding_doc(word_tokenizer, cleaned_words)
def padding_doc(encoded_doc, max_length):
return(pad_sequences(encoded_doc, maxlen = max_length, padding = "post"))
padded_doc = padding_doc(encoded_doc, max_length)
padded_doc[:5]
print("Shape of padded docs = ",padded_doc.shape)
#tokenizer with filter changed
output_tokenizer = create_tokenizer(unique_intent, filters = '!"#$%&()*+,-/:;<=>?@[\]^`{|}~')
output_tokenizer.word_index
encoded_output = encoding_doc(output_tokenizer, intent)
encoded_output = np.array(encoded_output).reshape(len(encoded_output), 1)
encoded_output.shape
def one_hot(encode):
o = OneHotEncoder(sparse = False)
return(o.fit_transform(encode))
output_one_hot = one_hot(encoded_output)
output_one_hot.shape
"""# 4. Train and Validation Split"""
from sklearn.model_selection import train_test_split
train_X, val_X, train_Y, val_Y = train_test_split(padded_doc, output_one_hot, shuffle = True, test_size = 0.2)
print("Shape of train_X = %s and train_Y = %s" % (train_X.shape, train_Y.shape))
print("Shape of val_X = %s and val_Y = %s" % (val_X.shape, val_Y.shape))
"""# 5. GRU Modeling"""
def create_model(vocab_size, max_length):
model = Sequential()
model.add(Embedding(vocab_size, 128, input_length = max_length, trainable = False))
model.add(Bidirectional(LSTM(128)))
# model.add(LSTM(128))
model.add(Dense(32, activation = "relu"))
model.add(Dropout(0.5))
model.add(Dense(21, activation = "softmax"))
return model
def create_model(vocab_size, max_length):
model = Sequential()
model.add(Embedding(vocab_size, 128, input_length = max_length, trainable = False))
model.add(Bidirectional(LSTM(128, return_sequences=True)))
model.add(Bidirectional(LSTM(64)))
# model.add(LSTM(128))
model.add(Dense(32, activation = "relu"))
model.add(Dropout(0.5))
model.add(Dense(21, activation = "softmax"))
return model
model = create_model(vocab_size, max_length)
model.compile(loss = "categorical_crossentropy", optimizer = "adam", metrics = ["accuracy"])
model.summary()
"""# 6. Training"""
filename = 'model.h5'
checkpoint = ModelCheckpoint(filename, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
hist = model.fit(train_X, train_Y, epochs = 100, batch_size = 32, validation_data = (val_X, val_Y), callbacks = [checkpoint])
loss = pd.DataFrame({'loss': model.history.history['accuracy'], 'auc': model.history.history['val_accuracy'] })
loss.plot()
model = load_model("model.h5")
def predictions(text):
clean = re.sub(r'[^ a-z A-Z 0-9]', " ", text)
test_word = word_tokenize(clean)
test_word = [w.lower() for w in test_word]
test_ls = word_tokenizer.texts_to_sequences(test_word)
print(test_word)
#Check for unknown words
if [] in test_ls:
test_ls = list(filter(None, test_ls))
test_ls = np.array(test_ls).reshape(1, len(test_ls))
x = padding_doc(test_ls, max_length)
pred = model.predict_proba(x)
return pred
def get_final_output(pred, classes):
predictions = pred[0]
classes = np.array(classes)
ids = np.argsort(-predictions)
classes = classes[ids]
predictions = -np.sort(-predictions)
for i in range(pred.shape[1]):
print("%s has confidence = %s" % (classes[i], (predictions[i])))
"""# 7. Testing"""
text = "Can you help me?"
pred = predictions(text)
get_final_output(pred, unique_intent)
"""# 8. Save/Load Pickle"""
# from sklearn.externals import joblib
# joblib.dump(model, 'modelnlp.pkl')
# nlp_model = open('modelnlp.pkl','rb')
# nlp = joblib.load(nlp_model)
# !pip install git+https://github.com/TinkerMob/keras_albert_model.git
# from keras_albert_model import build_albert
"""# 9. Experiment with Monkeyzlearn API"""
from monkeylearn import MonkeyLearn
ml = MonkeyLearn('e7e230d51a8668a72eea86c29559bef04bd6c8fb')
data = ["Hi Feco, looks promising, I would like to schedule a call tomorrow and see the demo. What times do you have available? Thanks, Ryan."]
model_id = 'cl_v9GTn7zi'
result = ml.classifiers.classify(model_id, data)
print(result.body)
# !pip install monkeylearn
"""# 10. BERT Model"""
!pip install bert-for-tf2
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import json
import os
from sklearn.metrics import roc_curve
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Input, Dense, Embedding, Activation, LSTM, SimpleRNN, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
import bert
from tqdm import tqdm
from tensorflow.keras import backend as K
import tensorflow as tf
import tensorflow_hub as hub
print("TensorFlow Version:",tf.__version__)
print("Hub version: ",hub.__version__)
# Params for bert model
class BertModel(object):
| current_segment_id = 0
for token in tokens:
segments.append(current_segment_id)
if token == "[SEP]":
current_segment_id = 1
return segments + [0] * (max_seq_length - len(tokens))
def get_ids(self,tokens, tokenizer, max_seq_length):
"""Token ids from Tokenizer vocab"""
token_ids = tokenizer.convert_tokens_to_ids(tokens,)
input_ids = token_ids + [0] * (max_seq_length-len(token_ids))
return input_ids
def create_single_input(self,sentence,maxlen):
stokens = self.tokenizer.tokenize(sentence)
stokens = stokens[:maxlen]
stokens = ["[CLS]"] + stokens + ["[SEP]"]
ids = self.get_ids(stokens, self.tokenizer, self.max_len)
masks = self.get_masks | def __init__(self):
self.max_len = 128
bert_path = "https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/1"
FullTokenizer=bert.bert_tokenization.FullTokenizer
self.bert_module = hub.KerasLayer(bert_path,trainable=True)
self.vocab_file = self.bert_module.resolved_object.vocab_file.asset_path.numpy()
self.do_lower_case = self.bert_module.resolved_object.do_lower_case.numpy()
self.tokenizer = FullTokenizer(self.vocab_file,self.do_lower_case)
def get_masks(self,tokens, max_seq_length):
return [1]*len(tokens) + [0] * (max_seq_length - len(tokens))
def get_segments(self,tokens, max_seq_length):
"""Segments: 0 for the first sequence, 1 for the second"""
segments = [] | identifier_body |
Lab01Code.py | 3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#check if points are equal
try:
assert p1 != p2
except:
raise Exception('EC Points must not be equal')
#checking the points and different cases
if p1 == (None,None) and p2 == (None, None):
return (None,None)
elif (x0 == x1) and (y0.mod_add(y1,p)==0):
return (None,None)
elif (x0 == None or y0 == None) and (x1 != None and y1 != None):
return p2
elif (x1 == None or y1 == None) and (x0 != None and y0 != None):
return p1
elif y0 != None and x0 != None and y1 != None and x1 != None:
#check if the points are valid with an additional check
#through an exception
try:
assert p1 != p2
assert p1 != (x1,(-y1))
except:
raise Exception('EC Points must not be equal')
if y1 == 0:
lam0 = -y0
else:
lam0 = y1.mod_sub(y0,p)
if x1 == 0:
lam1 = -x0
else:
lam1 = x1.mod_sub(x0,p)
#condition check if the gradient is 0
if lam0 == 0 or lam1 == 0:
xr = -x0.mod_sub(x1,p)
yr = -y1
#check if the point is on the curve
if xr == None or yr == None:
return (None, None)
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#do calculations on the numbers that can give valid xr,yr point
else:
lam2 = lam1.mod_inverse(p)
lam = lam0.mod_mul(lam2,p)
xr0 = lam.mod_pow(Bn(2),p)
xr1 = xr0.mod_sub(x0,p)
xr = xr1.mod_sub(x1,p)
yr0 = x0.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y0,p)
#check if the new point is valid and if it is then return it
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#check if any part is None, it may never be!
if xr == None or yr == None:
return (None, None)
return (xr, yr)
def point_double(a, b, p, x, y):
"""Define "doubling" an EC point.
A special case, when a point needs to be added to itself.
Reminder:
lam = 3 * x ^ 2 + a * (2 * y) ^ -1 (mod p)
xr = lam ^ 2 - 2 * xp
yr = lam * (xp - xr) - yp (mod p)
Returns the point representing the double of the input (x, y).
"""
xr, yr = None, None
p1 = (x,y)
#check the input point for validity
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('not a valid point')
#check curve 4a^3+27b^2 != 0 mod p for validity.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#verify the input point
if p1 == (None,None):
return (None,None)
elif p1 == (0,0):
return (0,0)
elif y == None or y == 0:
return (None, None)
#calculate the new point== doubled point
else:
if x == 0:
xp2 = a
else:
xp0 = x.mod_pow(Bn(2),p)
xp1 = xp0.mod_mul(Bn(3),p)
xp2 = xp1.mod_add(a,p)
yp0 = y.mod_mul(Bn(2),p)
if yp0 != 0:
yp = yp0.mod_inverse(p)
else:
yp = 0;
if (xp2 != 0 and yp != 0):
#calculate gradient if the points are not zero
lam = xp2.mod_mul(yp,p)
#calculate new x coordinate
xr0 = lam.mod_pow(Bn(2),p)
xr1 = x.mod_mul(Bn(2),p)
xr = xr0.mod_sub(xr1,p)
#calcualte new y coordinate
yr0 = x.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y,p)
if (xr == None or yr == None):
return (None, None)
else:
xr = -x.mod_mul(Bn(2),p)
yr = -y
if (xr == None or yr == None):
return (None, None)
#check whether the new point is valid whcih is passed from the previous if statement
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('The new point is not valid')
return xr, yr
def point_scalar_multiplication_double_and_add(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
Q = infinity
for i = 0 to num_bits(P)-1
if bit i of P == 1 then
Q = Q + P
P = 2 * P
return Q
"""
Q = (None, None)
P = (x, y)
binary = bin(scalar)
for i in range(scalar.num_bits()):
if binary[scalar.num_bits()-i+1] == '1':
Q = point_add(a, b, p, Q[0], Q[1], P[0], P[1])
#print Q
pass
P = point_double(a, b, p, P[0],P[1])
pass
return Q
def point_scalar_multiplication_montgomerry_ladder(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
R0 = infinity
R1 = P
for i in num_bits(P)-1 to zero:
if di = 0:
R1 = R0 + R1
R0 = 2R0
else
R0 = R0 + R1
R1 = 2 R1
return R0
"""
R0 = (None, None)
R1 = (x, y)
#convert the scalar variable to binary
binary = bin(scalar)
#start the scan checking each bit
for i in reversed(range(0,scalar.num_bits())):
#if bit is 0 do the addition and double R0
if binary[scalar.num_bits()-i+1] == '0':
R1 = point_add(a, b, p, R0[0], R0[1], R1[0], R1[1])
R0 = point_double(a, b, p, R0[0],R0[1])
#if bit is not zero then do the addition and double R1
else:
R0 = point_add(a, b, p, R0[0], R0[1], R1[0], R1[1])
R1 = point_double(a, b, p, R1[0],R1[1])
return R0
#####################################################
# TASK 4 -- Standard ECDSA signatures
#
# - Implement a key / param generation
# - Implement ECDSA signature using petlib.ecdsa
# - Implement ECDSA signature verification
# using petlib.ecdsa
from hashlib import sha256
from petlib.ec import EcGroup
from petlib.ecdsa import do_ecdsa_sign, do_ecdsa_verify
def | ecdsa_key_gen | identifier_name |
|
Lab01Code.py | y1):
| assert is_point_on_curve(a, b, p, x1, y1)
except:
raise Exception('not valid points')
#check curve 4a^3+27b^2 != 0 mod p.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#check if points are equal
try:
assert p1 != p2
except:
raise Exception('EC Points must not be equal')
#checking the points and different cases
if p1 == (None,None) and p2 == (None, None):
return (None,None)
elif (x0 == x1) and (y0.mod_add(y1,p)==0):
return (None,None)
elif (x0 == None or y0 == None) and (x1 != None and y1 != None):
return p2
elif (x1 == None or y1 == None) and (x0 != None and y0 != None):
return p1
elif y0 != None and x0 != None and y1 != None and x1 != None:
#check if the points are valid with an additional check
#through an exception
try:
assert p1 != p2
assert p1 != (x1,(-y1))
except:
raise Exception('EC Points must not be equal')
if y1 == 0:
lam0 = -y0
else:
lam0 = y1.mod_sub(y0,p)
if x1 == 0:
lam1 = -x0
else:
lam1 = x1.mod_sub(x0,p)
#condition check if the gradient is 0
if lam0 == 0 or lam1 == 0:
xr = -x0.mod_sub(x1,p)
yr = -y1
#check if the point is on the curve
if xr == None or yr == None:
return (None, None)
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#do calculations on the numbers that can give valid xr,yr point
else:
lam2 = lam1.mod_inverse(p)
lam = lam0.mod_mul(lam2,p)
xr0 = lam.mod_pow(Bn(2),p)
xr1 = xr0.mod_sub(x0,p)
xr = xr1.mod_sub(x1,p)
yr0 = x0.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y0,p)
#check if the new point is valid and if it is then return it
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#check if any part is None, it may never be!
if xr == None or yr == None:
return (None, None)
return (xr, yr)
def point_double(a, b, p, x, y):
"""Define "doubling" an EC point.
A special case, when a point needs to be added to itself.
Reminder:
lam = 3 * x ^ 2 + a * (2 * y) ^ -1 (mod p)
xr = lam ^ 2 - 2 * xp
yr = lam * (xp - xr) - yp (mod p)
Returns the point representing the double of the input (x, y).
"""
xr, yr = None, None
p1 = (x,y)
#check the input point for validity
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('not a valid point')
#check curve 4a^3+27b^2 != 0 mod p for validity.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#verify the input point
if p1 == (None,None):
return (None,None)
elif p1 == (0,0):
return (0,0)
elif y == None or y == 0:
return (None, None)
#calculate the new point== doubled point
else:
if x == 0:
xp2 = a
else:
xp0 = x.mod_pow(Bn(2),p)
xp1 = xp0.mod_mul(Bn(3),p)
xp2 = xp1.mod_add(a,p)
yp0 = y.mod_mul(Bn(2),p)
if yp0 != 0:
yp = yp0.mod_inverse(p)
else:
yp = 0;
if (xp2 != 0 and yp != 0):
#calculate gradient if the points are not zero
lam = xp2.mod_mul(yp,p)
#calculate new x coordinate
xr0 = lam.mod_pow(Bn(2),p)
xr1 = x.mod_mul(Bn(2),p)
xr = xr0.mod_sub(xr1,p)
#calcualte new y coordinate
yr0 = x.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y,p)
if (xr == None or yr == None):
return (None, None)
else:
xr = -x.mod_mul(Bn(2),p)
yr = -y
if (xr == None or yr == None):
return (None, None)
#check whether the new point is valid whcih is passed from the previous if statement
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('The new point is not valid')
return xr, yr
def point_scalar_multiplication_double_and_add(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
Q = infinity
for i = 0 to num_bits(P)-1
if bit i of P == 1 then
Q = Q + P
P = 2 * P
return Q
"""
Q = (None, None)
P = (x, y)
binary = bin(scalar)
for i in range(scalar.num_bits()):
if binary[scalar.num_bits()-i+1] == '1':
Q = point_add(a, b, p, Q[0], Q[1], P[0], P[1])
#print Q
pass
P = point_double(a, b, p, P[0],P[1])
pass
return Q
def point_scalar_multiplication_montgomerry_ladder(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
R0 = infinity
R1 = P
for i in num_bits(P)-1 to zero:
if di = 0:
R1 = R0 + R1
R0 = 2R0
else
R0 = R0 + R1
R1 = 2 R1
return R0
"""
R0 = (None, None)
R1 = (x, y)
#convert the | """Define the "addition" operation for 2 EC Points.
Reminder: (xr, yr) = (xq, yq) + (xp, yp)
is defined as:
lam = yq - yp * (xq - xp)^-1 (mod p)
xr = lam^2 - xp - xq (mod p)
yr = lam * (xp - xr) - yp (mod p)
Return the point resulting from the addition. Raises an Exception if the points are equal.
"""
#initilise new coordinates
xr, yr = None, None
#create tuples for the input points
p1 = (x0,y0)
p2 = (x1,y1)
#check validity of the points
try:
assert is_point_on_curve(a, b, p, x0, y0) | identifier_body |
Lab01Code.py | 1):
"""Define the "addition" operation for 2 EC Points.
Reminder: (xr, yr) = (xq, yq) + (xp, yp)
is defined as:
lam = yq - yp * (xq - xp)^-1 (mod p)
xr = lam^2 - xp - xq (mod p)
yr = lam * (xp - xr) - yp (mod p)
Return the point resulting from the addition. Raises an Exception if the points are equal.
"""
#initilise new coordinates
xr, yr = None, None
#create tuples for the input points
p1 = (x0,y0)
p2 = (x1,y1)
#check validity of the points
try:
assert is_point_on_curve(a, b, p, x0, y0)
assert is_point_on_curve(a, b, p, x1, y1)
except:
raise Exception('not valid points')
#check curve 4a^3+27b^2 != 0 mod p.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#check if points are equal
try:
assert p1 != p2
except:
raise Exception('EC Points must not be equal')
#checking the points and different cases
if p1 == (None,None) and p2 == (None, None):
return (None,None)
elif (x0 == x1) and (y0.mod_add(y1,p)==0):
return (None,None)
elif (x0 == None or y0 == None) and (x1 != None and y1 != None):
return p2
elif (x1 == None or y1 == None) and (x0 != None and y0 != None):
return p1
elif y0 != None and x0 != None and y1 != None and x1 != None:
#check if the points are valid with an additional check
#through an exception
try:
assert p1 != p2
assert p1 != (x1,(-y1))
except:
raise Exception('EC Points must not be equal')
if y1 == 0:
lam0 = -y0
else:
lam0 = y1.mod_sub(y0,p)
if x1 == 0:
lam1 = -x0
else:
lam1 = x1.mod_sub(x0,p)
#condition check if the gradient is 0
if lam0 == 0 or lam1 == 0:
xr = -x0.mod_sub(x1,p)
yr = -y1
#check if the point is on the curve
if xr == None or yr == None:
return (None, None)
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#do calculations on the numbers that can give valid xr,yr point
else:
lam2 = lam1.mod_inverse(p)
lam = lam0.mod_mul(lam2,p)
xr0 = lam.mod_pow(Bn(2),p)
xr1 = xr0.mod_sub(x0,p)
xr = xr1.mod_sub(x1,p)
yr0 = x0.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y0,p)
#check if the new point is valid and if it is then return it
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#check if any part is None, it may never be!
if xr == None or yr == None:
return (None, None)
return (xr, yr)
def point_double(a, b, p, x, y):
"""Define "doubling" an EC point.
A special case, when a point needs to be added to itself.
Reminder:
lam = 3 * x ^ 2 + a * (2 * y) ^ -1 (mod p)
xr = lam ^ 2 - 2 * xp
yr = lam * (xp - xr) - yp (mod p)
Returns the point representing the double of the input (x, y).
"""
xr, yr = None, None
p1 = (x,y)
#check the input point for validity
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('not a valid point')
#check curve 4a^3+27b^2 != 0 mod p for validity.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#verify the input point
if p1 == (None,None):
return (None,None)
elif p1 == (0,0):
return (0,0)
elif y == None or y == 0:
return (None, None)
#calculate the new point== doubled point
else:
if x == 0:
xp2 = a
else:
xp0 = x.mod_pow(Bn(2),p)
xp1 = xp0.mod_mul(Bn(3),p)
xp2 = xp1.mod_add(a,p)
yp0 = y.mod_mul(Bn(2),p)
if yp0 != 0:
|
else:
yp = 0;
if (xp2 != 0 and yp != 0):
#calculate gradient if the points are not zero
lam = xp2.mod_mul(yp,p)
#calculate new x coordinate
xr0 = lam.mod_pow(Bn(2),p)
xr1 = x.mod_mul(Bn(2),p)
xr = xr0.mod_sub(xr1,p)
#calcualte new y coordinate
yr0 = x.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y,p)
if (xr == None or yr == None):
return (None, None)
else:
xr = -x.mod_mul(Bn(2),p)
yr = -y
if (xr == None or yr == None):
return (None, None)
#check whether the new point is valid whcih is passed from the previous if statement
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('The new point is not valid')
return xr, yr
def point_scalar_multiplication_double_and_add(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
Q = infinity
for i = 0 to num_bits(P)-1
if bit i of P == 1 then
Q = Q + P
P = 2 * P
return Q
"""
Q = (None, None)
P = (x, y)
binary = bin(scalar)
for i in range(scalar.num_bits()):
if binary[scalar.num_bits()-i+1] == '1':
Q = point_add(a, b, p, Q[0], Q[1], P[0], P[1])
#print Q
pass
P = point_double(a, b, p, P[0],P[1])
pass
return Q
def point_scalar_multiplication_montgomerry_ladder(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
R0 = infinity
R1 = P
for i in num_bits(P)-1 to zero:
if di = 0:
R1 = R0 + R1
R0 = 2R0
else
R0 = R0 + R1
R1 = 2 R1
return R0
"""
R0 = (None, None)
R1 = (x, y)
#convert the | yp = yp0.mod_inverse(p) | conditional_block |
Lab01Code.py | y1):
"""Define the "addition" operation for 2 EC Points.
Reminder: (xr, yr) = (xq, yq) + (xp, yp)
is defined as:
lam = yq - yp * (xq - xp)^-1 (mod p)
xr = lam^2 - xp - xq (mod p)
yr = lam * (xp - xr) - yp (mod p)
Return the point resulting from the addition. Raises an Exception if the points are equal.
"""
#initilise new coordinates
xr, yr = None, None
#create tuples for the input points
p1 = (x0,y0)
p2 = (x1,y1)
#check validity of the points
try:
assert is_point_on_curve(a, b, p, x0, y0)
assert is_point_on_curve(a, b, p, x1, y1)
except:
raise Exception('not valid points')
#check curve 4a^3+27b^2 != 0 mod p.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve')
#check if points are equal
try:
assert p1 != p2
except:
raise Exception('EC Points must not be equal')
#checking the points and different cases
if p1 == (None,None) and p2 == (None, None):
return (None,None)
elif (x0 == x1) and (y0.mod_add(y1,p)==0):
return (None,None)
elif (x0 == None or y0 == None) and (x1 != None and y1 != None):
return p2
elif (x1 == None or y1 == None) and (x0 != None and y0 != None):
return p1
elif y0 != None and x0 != None and y1 != None and x1 != None:
#check if the points are valid with an additional check
#through an exception
try:
assert p1 != p2
assert p1 != (x1,(-y1))
except:
raise Exception('EC Points must not be equal')
if y1 == 0:
lam0 = -y0
else:
lam0 = y1.mod_sub(y0,p)
if x1 == 0:
lam1 = -x0
else:
lam1 = x1.mod_sub(x0,p)
#condition check if the gradient is 0
if lam0 == 0 or lam1 == 0:
xr = -x0.mod_sub(x1,p)
yr = -y1
#check if the point is on the curve
if xr == None or yr == None:
return (None, None)
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#do calculations on the numbers that can give valid xr,yr point
else:
lam2 = lam1.mod_inverse(p)
lam = lam0.mod_mul(lam2,p)
xr0 = lam.mod_pow(Bn(2),p)
xr1 = xr0.mod_sub(x0,p)
xr = xr1.mod_sub(x1,p)
yr0 = x0.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y0,p)
#check if the new point is valid and if it is then return it
try:
assert is_point_on_curve(a, b, p, xr, yr)
except:
raise Exception('The new point is not valid')
#check if any part is None, it may never be!
if xr == None or yr == None:
return (None, None)
return (xr, yr)
def point_double(a, b, p, x, y):
"""Define "doubling" an EC point.
A special case, when a point needs to be added to itself.
Reminder:
lam = 3 * x ^ 2 + a * (2 * y) ^ -1 (mod p)
xr = lam ^ 2 - 2 * xp
yr = lam * (xp - xr) - yp (mod p)
Returns the point representing the double of the input (x, y).
"""
xr, yr = None, None
p1 = (x,y)
#check the input point for validity
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('not a valid point')
#check curve 4a^3+27b^2 != 0 mod p for validity.
c0 = a.mod_pow(Bn(3),p)
c1 = c0.mod_mul(Bn(4),p)
c2 = b.mod_pow(Bn(2),p)
c3 = c2.mod_mul(Bn(27),p)
c = c1.mod_add(c3,p)
try:
assert c != 0
except:
raise Exception('invalid curve') | return (0,0)
elif y == None or y == 0:
return (None, None)
#calculate the new point== doubled point
else:
if x == 0:
xp2 = a
else:
xp0 = x.mod_pow(Bn(2),p)
xp1 = xp0.mod_mul(Bn(3),p)
xp2 = xp1.mod_add(a,p)
yp0 = y.mod_mul(Bn(2),p)
if yp0 != 0:
yp = yp0.mod_inverse(p)
else:
yp = 0;
if (xp2 != 0 and yp != 0):
#calculate gradient if the points are not zero
lam = xp2.mod_mul(yp,p)
#calculate new x coordinate
xr0 = lam.mod_pow(Bn(2),p)
xr1 = x.mod_mul(Bn(2),p)
xr = xr0.mod_sub(xr1,p)
#calcualte new y coordinate
yr0 = x.mod_sub(xr,p)
yr1 = lam.mod_mul(yr0,p)
yr = yr1.mod_sub(y,p)
if (xr == None or yr == None):
return (None, None)
else:
xr = -x.mod_mul(Bn(2),p)
yr = -y
if (xr == None or yr == None):
return (None, None)
#check whether the new point is valid whcih is passed from the previous if statement
try:
assert is_point_on_curve(a, b, p, x, y)
except:
raise Exception('The new point is not valid')
return xr, yr
def point_scalar_multiplication_double_and_add(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
Q = infinity
for i = 0 to num_bits(P)-1
if bit i of P == 1 then
Q = Q + P
P = 2 * P
return Q
"""
Q = (None, None)
P = (x, y)
binary = bin(scalar)
for i in range(scalar.num_bits()):
if binary[scalar.num_bits()-i+1] == '1':
Q = point_add(a, b, p, Q[0], Q[1], P[0], P[1])
#print Q
pass
P = point_double(a, b, p, P[0],P[1])
pass
return Q
def point_scalar_multiplication_montgomerry_ladder(a, b, p, x, y, scalar):
"""
Implement Point multiplication with a scalar:
r * (x, y) = (x, y) + ... + (x, y) (r times)
Reminder of Double and Multiply algorithm: r * P
R0 = infinity
R1 = P
for i in num_bits(P)-1 to zero:
if di = 0:
R1 = R0 + R1
R0 = 2R0
else
R0 = R0 + R1
R1 = 2 R1
return R0
"""
R0 = (None, None)
R1 = (x, y)
#convert |
#verify the input point
if p1 == (None,None):
return (None,None)
elif p1 == (0,0): | random_line_split |
lower.rs | (),
diagnostics: vec![],
unknown_ty: TyKind::Unknown.intern(),
}
}
}
impl Index<LocalTypeRefId> for LowerTyMap {
type Output = Ty;
fn index(&self, expr: LocalTypeRefId) -> &Ty {
self.type_ref_to_type.get(expr).unwrap_or(&self.unknown_ty)
}
}
impl LowerTyMap {
/// Adds all the `LowerDiagnostic`s of the result to the `DiagnosticSink`.
pub(crate) fn add_diagnostics(
&self,
db: &dyn HirDatabase,
file_id: FileId,
source_map: &TypeRefSourceMap,
sink: &mut DiagnosticSink,
) {
self.diagnostics
.iter()
.for_each(|it| it.add_to(db, file_id, source_map, sink))
}
}
impl Ty {
/// Tries to lower a HIR type reference to an actual resolved type. Besides the type also
/// returns an diagnostics that where encountered along the way.
pub(crate) fn from_hir(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
type_ref: LocalTypeRefId,
) -> (Ty, Vec<diagnostics::LowerDiagnostic>) {
let mut diagnostics = Vec::new();
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut diagnostics, type_ref);
(ty, diagnostics)
}
/// Tries to lower a HIR type reference to an actual resolved type. Takes a mutable reference
/// to a `Vec` which will hold any diagnostics encountered a long the way.
fn from_hir_with_diagnostics(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
diagnostics: &mut Vec<LowerDiagnostic>,
type_ref: LocalTypeRefId,
) -> Ty {
let res = match &type_ref_map[type_ref] {
TypeRef::Path(path) => Ty::from_path(db, resolver, type_ref, path, diagnostics),
TypeRef::Error => Some(TyKind::Unknown.intern()),
TypeRef::Tuple(inner) => {
let inner_tys = inner.iter().map(|tr| {
Self::from_hir_with_diagnostics(db, resolver, type_ref_map, diagnostics, *tr)
});
Some(TyKind::Tuple(inner_tys.len(), inner_tys.collect()).intern())
}
TypeRef::Never => Some(TyKind::Never.intern()),
TypeRef::Array(inner) => {
let inner = Self::from_hir_with_diagnostics(
db,
resolver,
type_ref_map,
diagnostics,
*inner,
);
Some(TyKind::Array(inner).intern())
}
};
if let Some(ty) = res {
ty
} else {
diagnostics.push(LowerDiagnostic::UnresolvedType { id: type_ref });
TyKind::Unknown.intern()
}
}
/// Constructs a `Ty` from a path.
fn from_path(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref: LocalTypeRefId,
path: &Path,
diagnostics: &mut Vec<LowerDiagnostic>,
) -> Option<Self> {
// Find the type
let (ty, vis) = resolver.resolve_path_as_type_fully(db.upcast(), path)?;
// Get the definition and visibility
let def = match ty {
TypeNs::StructId(id) => TypableDef::Struct(id.into()),
TypeNs::TypeAliasId(id) => TypableDef::TypeAlias(id.into()),
TypeNs::PrimitiveType(id) => TypableDef::PrimitiveType(id),
};
// Get the current module and see if the type is visible from here
if let Some(module) = resolver.module() {
if !vis.is_visible_from(db, module) {
diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref })
}
}
Some(db.type_for_def(def, Namespace::Types))
}
}
/// Resolves all types in the specified `TypeRefMap`.
pub fn lower_types(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
) -> Arc<LowerTyMap> {
let mut result = LowerTyMap::default();
for (id, _) in type_ref_map.iter() {
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut result.diagnostics, id);
result.type_ref_to_type.insert(id, ty);
}
Arc::new(result)
}
pub fn lower_struct_query(db: &dyn HirDatabase, s: Struct) -> Arc<LowerTyMap> {
let data = s.data(db.upcast());
lower_types(db, &s.id.resolver(db.upcast()), data.type_ref_map())
}
pub fn lower_type_alias_query(db: &dyn HirDatabase, t: TypeAlias) -> Arc<LowerTyMap> {
let data = t.data(db.upcast());
lower_types(db, &t.id.resolver(db.upcast()), data.type_ref_map())
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TypableDef {
Function(Function),
PrimitiveType(PrimitiveType),
Struct(Struct),
TypeAlias(TypeAlias),
}
impl From<Function> for TypableDef {
fn from(f: Function) -> Self {
TypableDef::Function(f)
}
}
impl From<PrimitiveType> for TypableDef {
fn from(f: PrimitiveType) -> Self {
TypableDef::PrimitiveType(f)
}
}
impl From<Struct> for TypableDef {
fn from(f: Struct) -> Self {
TypableDef::Struct(f)
}
}
impl From<ModuleDef> for Option<TypableDef> {
fn from(d: ModuleDef) -> Self {
match d {
ModuleDef::Function(f) => Some(TypableDef::Function(f)),
ModuleDef::PrimitiveType(t) => Some(TypableDef::PrimitiveType(t)),
ModuleDef::Struct(t) => Some(TypableDef::Struct(t)),
ModuleDef::TypeAlias(t) => Some(TypableDef::TypeAlias(t)),
ModuleDef::Module(_) => None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CallableDef {
Function(Function),
Struct(Struct),
}
impl_froms!(CallableDef: Function, Struct);
impl CallableDef {
pub fn is_function(self) -> bool {
matches!(self, CallableDef::Function(_))
}
pub fn is_struct(self) -> bool {
matches!(self, CallableDef::Struct(_))
}
}
impl HasVisibility for CallableDef {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self {
CallableDef::Struct(strukt) => strukt.visibility(db),
CallableDef::Function(function) => function.visibility(db),
}
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn type_for_def(db: &dyn HirDatabase, def: TypableDef, ns: Namespace) -> Ty {
match (def, ns) {
(TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f),
(TypableDef::PrimitiveType(t), Namespace::Types) => type_for_primitive(t),
(TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s),
(TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s),
(TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t),
// 'error' cases:
(TypableDef::Function(_), Namespace::Types) => TyKind::Unknown.intern(),
(TypableDef::PrimitiveType(_), Namespace::Values) => TyKind::Unknown.intern(),
(TypableDef::TypeAlias(_), Namespace::Values) => TyKind::Unknown.intern(),
}
}
/// Build the declared type of a static.
fn type_for_primitive(def: PrimitiveType) -> Ty {
match def {
PrimitiveType::Float(f) => TyKind::Float(f.into()),
PrimitiveType::Int(i) => TyKind::Int(i.into()),
PrimitiveType::Bool => TyKind::Bool,
}
.intern()
}
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(_db: &dyn HirDatabase, def: Function) -> Ty {
TyKind::FnDef(def.into(), Substitution::empty()).intern()
}
pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> FnSig {
match def {
CallableDef::Function(f) => fn_sig_for_fn(db, f), | CallableDef::Struct(s) => fn_sig_for_struct_constructor(db, s),
}
}
pub(crate) fn fn_sig_for_fn(db: &dyn HirDatabase, def: Function) -> FnSig {
let data = def.data(db.upcast());
let resolver = def.id.resolver(db.upcast());
let params = data
| random_line_split |
|
lower.rs | TyMap {
/// Adds all the `LowerDiagnostic`s of the result to the `DiagnosticSink`.
pub(crate) fn add_diagnostics(
&self,
db: &dyn HirDatabase,
file_id: FileId,
source_map: &TypeRefSourceMap,
sink: &mut DiagnosticSink,
) {
self.diagnostics
.iter()
.for_each(|it| it.add_to(db, file_id, source_map, sink))
}
}
impl Ty {
/// Tries to lower a HIR type reference to an actual resolved type. Besides the type also
/// returns an diagnostics that where encountered along the way.
pub(crate) fn from_hir(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
type_ref: LocalTypeRefId,
) -> (Ty, Vec<diagnostics::LowerDiagnostic>) {
let mut diagnostics = Vec::new();
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut diagnostics, type_ref);
(ty, diagnostics)
}
/// Tries to lower a HIR type reference to an actual resolved type. Takes a mutable reference
/// to a `Vec` which will hold any diagnostics encountered a long the way.
fn from_hir_with_diagnostics(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
diagnostics: &mut Vec<LowerDiagnostic>,
type_ref: LocalTypeRefId,
) -> Ty {
let res = match &type_ref_map[type_ref] {
TypeRef::Path(path) => Ty::from_path(db, resolver, type_ref, path, diagnostics),
TypeRef::Error => Some(TyKind::Unknown.intern()),
TypeRef::Tuple(inner) => {
let inner_tys = inner.iter().map(|tr| {
Self::from_hir_with_diagnostics(db, resolver, type_ref_map, diagnostics, *tr)
});
Some(TyKind::Tuple(inner_tys.len(), inner_tys.collect()).intern())
}
TypeRef::Never => Some(TyKind::Never.intern()),
TypeRef::Array(inner) => {
let inner = Self::from_hir_with_diagnostics(
db,
resolver,
type_ref_map,
diagnostics,
*inner,
);
Some(TyKind::Array(inner).intern())
}
};
if let Some(ty) = res {
ty
} else {
diagnostics.push(LowerDiagnostic::UnresolvedType { id: type_ref });
TyKind::Unknown.intern()
}
}
/// Constructs a `Ty` from a path.
fn from_path(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref: LocalTypeRefId,
path: &Path,
diagnostics: &mut Vec<LowerDiagnostic>,
) -> Option<Self> {
// Find the type
let (ty, vis) = resolver.resolve_path_as_type_fully(db.upcast(), path)?;
// Get the definition and visibility
let def = match ty {
TypeNs::StructId(id) => TypableDef::Struct(id.into()),
TypeNs::TypeAliasId(id) => TypableDef::TypeAlias(id.into()),
TypeNs::PrimitiveType(id) => TypableDef::PrimitiveType(id),
};
// Get the current module and see if the type is visible from here
if let Some(module) = resolver.module() {
if !vis.is_visible_from(db, module) {
diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref })
}
}
Some(db.type_for_def(def, Namespace::Types))
}
}
/// Resolves all types in the specified `TypeRefMap`.
pub fn lower_types(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
) -> Arc<LowerTyMap> {
let mut result = LowerTyMap::default();
for (id, _) in type_ref_map.iter() {
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut result.diagnostics, id);
result.type_ref_to_type.insert(id, ty);
}
Arc::new(result)
}
pub fn lower_struct_query(db: &dyn HirDatabase, s: Struct) -> Arc<LowerTyMap> {
let data = s.data(db.upcast());
lower_types(db, &s.id.resolver(db.upcast()), data.type_ref_map())
}
pub fn lower_type_alias_query(db: &dyn HirDatabase, t: TypeAlias) -> Arc<LowerTyMap> {
let data = t.data(db.upcast());
lower_types(db, &t.id.resolver(db.upcast()), data.type_ref_map())
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TypableDef {
Function(Function),
PrimitiveType(PrimitiveType),
Struct(Struct),
TypeAlias(TypeAlias),
}
impl From<Function> for TypableDef {
fn from(f: Function) -> Self {
TypableDef::Function(f)
}
}
impl From<PrimitiveType> for TypableDef {
fn from(f: PrimitiveType) -> Self {
TypableDef::PrimitiveType(f)
}
}
impl From<Struct> for TypableDef {
fn from(f: Struct) -> Self {
TypableDef::Struct(f)
}
}
impl From<ModuleDef> for Option<TypableDef> {
fn from(d: ModuleDef) -> Self {
match d {
ModuleDef::Function(f) => Some(TypableDef::Function(f)),
ModuleDef::PrimitiveType(t) => Some(TypableDef::PrimitiveType(t)),
ModuleDef::Struct(t) => Some(TypableDef::Struct(t)),
ModuleDef::TypeAlias(t) => Some(TypableDef::TypeAlias(t)),
ModuleDef::Module(_) => None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CallableDef {
Function(Function),
Struct(Struct),
}
impl_froms!(CallableDef: Function, Struct);
impl CallableDef {
pub fn is_function(self) -> bool {
matches!(self, CallableDef::Function(_))
}
pub fn is_struct(self) -> bool {
matches!(self, CallableDef::Struct(_))
}
}
impl HasVisibility for CallableDef {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self {
CallableDef::Struct(strukt) => strukt.visibility(db),
CallableDef::Function(function) => function.visibility(db),
}
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn type_for_def(db: &dyn HirDatabase, def: TypableDef, ns: Namespace) -> Ty {
match (def, ns) {
(TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f),
(TypableDef::PrimitiveType(t), Namespace::Types) => type_for_primitive(t),
(TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s),
(TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s),
(TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t),
// 'error' cases:
(TypableDef::Function(_), Namespace::Types) => TyKind::Unknown.intern(),
(TypableDef::PrimitiveType(_), Namespace::Values) => TyKind::Unknown.intern(),
(TypableDef::TypeAlias(_), Namespace::Values) => TyKind::Unknown.intern(),
}
}
/// Build the declared type of a static.
fn type_for_primitive(def: PrimitiveType) -> Ty {
match def {
PrimitiveType::Float(f) => TyKind::Float(f.into()),
PrimitiveType::Int(i) => TyKind::Int(i.into()),
PrimitiveType::Bool => TyKind::Bool,
}
.intern()
}
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(_db: &dyn HirDatabase, def: Function) -> Ty {
TyKind::FnDef(def.into(), Substitution::empty()).intern()
}
pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> FnSig {
match def {
CallableDef::Function(f) => fn_sig_for_fn(db, f),
CallableDef::Struct(s) => fn_sig_for_struct_constructor(db, s),
}
}
pub(crate) fn fn_sig_for_fn(db: &dyn HirDatabase, def: Function) -> FnSig | {
let data = def.data(db.upcast());
let resolver = def.id.resolver(db.upcast());
let params = data
.params()
.iter()
.map(|tr| Ty::from_hir(db, &resolver, data.type_ref_map(), *tr).0)
.collect::<Vec<_>>();
let ret = Ty::from_hir(db, &resolver, data.type_ref_map(), *data.ret_type()).0;
FnSig::from_params_and_return(params, ret)
} | identifier_body |
|
lower.rs | Vec<diagnostics::LowerDiagnostic>) {
let mut diagnostics = Vec::new();
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut diagnostics, type_ref);
(ty, diagnostics)
}
/// Tries to lower a HIR type reference to an actual resolved type. Takes a mutable reference
/// to a `Vec` which will hold any diagnostics encountered a long the way.
fn from_hir_with_diagnostics(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
diagnostics: &mut Vec<LowerDiagnostic>,
type_ref: LocalTypeRefId,
) -> Ty {
let res = match &type_ref_map[type_ref] {
TypeRef::Path(path) => Ty::from_path(db, resolver, type_ref, path, diagnostics),
TypeRef::Error => Some(TyKind::Unknown.intern()),
TypeRef::Tuple(inner) => {
let inner_tys = inner.iter().map(|tr| {
Self::from_hir_with_diagnostics(db, resolver, type_ref_map, diagnostics, *tr)
});
Some(TyKind::Tuple(inner_tys.len(), inner_tys.collect()).intern())
}
TypeRef::Never => Some(TyKind::Never.intern()),
TypeRef::Array(inner) => {
let inner = Self::from_hir_with_diagnostics(
db,
resolver,
type_ref_map,
diagnostics,
*inner,
);
Some(TyKind::Array(inner).intern())
}
};
if let Some(ty) = res {
ty
} else {
diagnostics.push(LowerDiagnostic::UnresolvedType { id: type_ref });
TyKind::Unknown.intern()
}
}
/// Constructs a `Ty` from a path.
fn from_path(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref: LocalTypeRefId,
path: &Path,
diagnostics: &mut Vec<LowerDiagnostic>,
) -> Option<Self> {
// Find the type
let (ty, vis) = resolver.resolve_path_as_type_fully(db.upcast(), path)?;
// Get the definition and visibility
let def = match ty {
TypeNs::StructId(id) => TypableDef::Struct(id.into()),
TypeNs::TypeAliasId(id) => TypableDef::TypeAlias(id.into()),
TypeNs::PrimitiveType(id) => TypableDef::PrimitiveType(id),
};
// Get the current module and see if the type is visible from here
if let Some(module) = resolver.module() {
if !vis.is_visible_from(db, module) {
diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref })
}
}
Some(db.type_for_def(def, Namespace::Types))
}
}
/// Resolves all types in the specified `TypeRefMap`.
pub fn lower_types(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
) -> Arc<LowerTyMap> {
let mut result = LowerTyMap::default();
for (id, _) in type_ref_map.iter() {
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut result.diagnostics, id);
result.type_ref_to_type.insert(id, ty);
}
Arc::new(result)
}
pub fn lower_struct_query(db: &dyn HirDatabase, s: Struct) -> Arc<LowerTyMap> {
let data = s.data(db.upcast());
lower_types(db, &s.id.resolver(db.upcast()), data.type_ref_map())
}
pub fn lower_type_alias_query(db: &dyn HirDatabase, t: TypeAlias) -> Arc<LowerTyMap> {
let data = t.data(db.upcast());
lower_types(db, &t.id.resolver(db.upcast()), data.type_ref_map())
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TypableDef {
Function(Function),
PrimitiveType(PrimitiveType),
Struct(Struct),
TypeAlias(TypeAlias),
}
impl From<Function> for TypableDef {
fn from(f: Function) -> Self {
TypableDef::Function(f)
}
}
impl From<PrimitiveType> for TypableDef {
fn from(f: PrimitiveType) -> Self {
TypableDef::PrimitiveType(f)
}
}
impl From<Struct> for TypableDef {
fn from(f: Struct) -> Self {
TypableDef::Struct(f)
}
}
impl From<ModuleDef> for Option<TypableDef> {
fn from(d: ModuleDef) -> Self {
match d {
ModuleDef::Function(f) => Some(TypableDef::Function(f)),
ModuleDef::PrimitiveType(t) => Some(TypableDef::PrimitiveType(t)),
ModuleDef::Struct(t) => Some(TypableDef::Struct(t)),
ModuleDef::TypeAlias(t) => Some(TypableDef::TypeAlias(t)),
ModuleDef::Module(_) => None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CallableDef {
Function(Function),
Struct(Struct),
}
impl_froms!(CallableDef: Function, Struct);
impl CallableDef {
pub fn is_function(self) -> bool {
matches!(self, CallableDef::Function(_))
}
pub fn is_struct(self) -> bool {
matches!(self, CallableDef::Struct(_))
}
}
impl HasVisibility for CallableDef {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self {
CallableDef::Struct(strukt) => strukt.visibility(db),
CallableDef::Function(function) => function.visibility(db),
}
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn type_for_def(db: &dyn HirDatabase, def: TypableDef, ns: Namespace) -> Ty {
match (def, ns) {
(TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f),
(TypableDef::PrimitiveType(t), Namespace::Types) => type_for_primitive(t),
(TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s),
(TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s),
(TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t),
// 'error' cases:
(TypableDef::Function(_), Namespace::Types) => TyKind::Unknown.intern(),
(TypableDef::PrimitiveType(_), Namespace::Values) => TyKind::Unknown.intern(),
(TypableDef::TypeAlias(_), Namespace::Values) => TyKind::Unknown.intern(),
}
}
/// Build the declared type of a static.
fn type_for_primitive(def: PrimitiveType) -> Ty {
match def {
PrimitiveType::Float(f) => TyKind::Float(f.into()),
PrimitiveType::Int(i) => TyKind::Int(i.into()),
PrimitiveType::Bool => TyKind::Bool,
}
.intern()
}
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(_db: &dyn HirDatabase, def: Function) -> Ty {
TyKind::FnDef(def.into(), Substitution::empty()).intern()
}
pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> FnSig {
match def {
CallableDef::Function(f) => fn_sig_for_fn(db, f),
CallableDef::Struct(s) => fn_sig_for_struct_constructor(db, s),
}
}
pub(crate) fn fn_sig_for_fn(db: &dyn HirDatabase, def: Function) -> FnSig {
let data = def.data(db.upcast());
let resolver = def.id.resolver(db.upcast());
let params = data
.params()
.iter()
.map(|tr| Ty::from_hir(db, &resolver, data.type_ref_map(), *tr).0)
.collect::<Vec<_>>();
let ret = Ty::from_hir(db, &resolver, data.type_ref_map(), *data.ret_type()).0;
FnSig::from_params_and_return(params, ret)
}
pub(crate) fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: Struct) -> FnSig {
let data = def.data(db.upcast());
let resolver = def.id.resolver(db.upcast());
let params = data
.fields
.iter()
.map(|(_, field)| Ty::from_hir(db, &resolver, data.type_ref_map(), field.type_ref).0)
.collect::<Vec<_>>();
let ret = type_for_struct(db, def);
FnSig::from_params_and_return(params, ret)
}
/// Build the type of a struct constructor.
fn type_for_struct_constructor(db: &dyn HirDatabase, def: Struct) -> Ty {
let struct_data = db.struct_data(def.id);
if struct_data.kind == StructKind::Tuple | {
TyKind::FnDef(def.into(), Substitution::empty()).intern()
} | conditional_block |
|
lower.rs | (),
diagnostics: vec![],
unknown_ty: TyKind::Unknown.intern(),
}
}
}
impl Index<LocalTypeRefId> for LowerTyMap {
type Output = Ty;
fn index(&self, expr: LocalTypeRefId) -> &Ty {
self.type_ref_to_type.get(expr).unwrap_or(&self.unknown_ty)
}
}
impl LowerTyMap {
/// Adds all the `LowerDiagnostic`s of the result to the `DiagnosticSink`.
pub(crate) fn add_diagnostics(
&self,
db: &dyn HirDatabase,
file_id: FileId,
source_map: &TypeRefSourceMap,
sink: &mut DiagnosticSink,
) {
self.diagnostics
.iter()
.for_each(|it| it.add_to(db, file_id, source_map, sink))
}
}
impl Ty {
/// Tries to lower a HIR type reference to an actual resolved type. Besides the type also
/// returns an diagnostics that where encountered along the way.
pub(crate) fn from_hir(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
type_ref: LocalTypeRefId,
) -> (Ty, Vec<diagnostics::LowerDiagnostic>) {
let mut diagnostics = Vec::new();
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut diagnostics, type_ref);
(ty, diagnostics)
}
/// Tries to lower a HIR type reference to an actual resolved type. Takes a mutable reference
/// to a `Vec` which will hold any diagnostics encountered a long the way.
fn from_hir_with_diagnostics(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
diagnostics: &mut Vec<LowerDiagnostic>,
type_ref: LocalTypeRefId,
) -> Ty {
let res = match &type_ref_map[type_ref] {
TypeRef::Path(path) => Ty::from_path(db, resolver, type_ref, path, diagnostics),
TypeRef::Error => Some(TyKind::Unknown.intern()),
TypeRef::Tuple(inner) => {
let inner_tys = inner.iter().map(|tr| {
Self::from_hir_with_diagnostics(db, resolver, type_ref_map, diagnostics, *tr)
});
Some(TyKind::Tuple(inner_tys.len(), inner_tys.collect()).intern())
}
TypeRef::Never => Some(TyKind::Never.intern()),
TypeRef::Array(inner) => {
let inner = Self::from_hir_with_diagnostics(
db,
resolver,
type_ref_map,
diagnostics,
*inner,
);
Some(TyKind::Array(inner).intern())
}
};
if let Some(ty) = res {
ty
} else {
diagnostics.push(LowerDiagnostic::UnresolvedType { id: type_ref });
TyKind::Unknown.intern()
}
}
/// Constructs a `Ty` from a path.
fn from_path(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref: LocalTypeRefId,
path: &Path,
diagnostics: &mut Vec<LowerDiagnostic>,
) -> Option<Self> {
// Find the type
let (ty, vis) = resolver.resolve_path_as_type_fully(db.upcast(), path)?;
// Get the definition and visibility
let def = match ty {
TypeNs::StructId(id) => TypableDef::Struct(id.into()),
TypeNs::TypeAliasId(id) => TypableDef::TypeAlias(id.into()),
TypeNs::PrimitiveType(id) => TypableDef::PrimitiveType(id),
};
// Get the current module and see if the type is visible from here
if let Some(module) = resolver.module() {
if !vis.is_visible_from(db, module) {
diagnostics.push(LowerDiagnostic::TypeIsPrivate { id: type_ref })
}
}
Some(db.type_for_def(def, Namespace::Types))
}
}
/// Resolves all types in the specified `TypeRefMap`.
pub fn lower_types(
db: &dyn HirDatabase,
resolver: &Resolver,
type_ref_map: &TypeRefMap,
) -> Arc<LowerTyMap> {
let mut result = LowerTyMap::default();
for (id, _) in type_ref_map.iter() {
let ty =
Ty::from_hir_with_diagnostics(db, resolver, type_ref_map, &mut result.diagnostics, id);
result.type_ref_to_type.insert(id, ty);
}
Arc::new(result)
}
pub fn lower_struct_query(db: &dyn HirDatabase, s: Struct) -> Arc<LowerTyMap> {
let data = s.data(db.upcast());
lower_types(db, &s.id.resolver(db.upcast()), data.type_ref_map())
}
pub fn lower_type_alias_query(db: &dyn HirDatabase, t: TypeAlias) -> Arc<LowerTyMap> {
let data = t.data(db.upcast());
lower_types(db, &t.id.resolver(db.upcast()), data.type_ref_map())
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TypableDef {
Function(Function),
PrimitiveType(PrimitiveType),
Struct(Struct),
TypeAlias(TypeAlias),
}
impl From<Function> for TypableDef {
fn from(f: Function) -> Self {
TypableDef::Function(f)
}
}
impl From<PrimitiveType> for TypableDef {
fn from(f: PrimitiveType) -> Self {
TypableDef::PrimitiveType(f)
}
}
impl From<Struct> for TypableDef {
fn from(f: Struct) -> Self {
TypableDef::Struct(f)
}
}
impl From<ModuleDef> for Option<TypableDef> {
fn from(d: ModuleDef) -> Self {
match d {
ModuleDef::Function(f) => Some(TypableDef::Function(f)),
ModuleDef::PrimitiveType(t) => Some(TypableDef::PrimitiveType(t)),
ModuleDef::Struct(t) => Some(TypableDef::Struct(t)),
ModuleDef::TypeAlias(t) => Some(TypableDef::TypeAlias(t)),
ModuleDef::Module(_) => None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CallableDef {
Function(Function),
Struct(Struct),
}
impl_froms!(CallableDef: Function, Struct);
impl CallableDef {
pub fn is_function(self) -> bool {
matches!(self, CallableDef::Function(_))
}
pub fn | (self) -> bool {
matches!(self, CallableDef::Struct(_))
}
}
impl HasVisibility for CallableDef {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self {
CallableDef::Struct(strukt) => strukt.visibility(db),
CallableDef::Function(function) => function.visibility(db),
}
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn type_for_def(db: &dyn HirDatabase, def: TypableDef, ns: Namespace) -> Ty {
match (def, ns) {
(TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f),
(TypableDef::PrimitiveType(t), Namespace::Types) => type_for_primitive(t),
(TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s),
(TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s),
(TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t),
// 'error' cases:
(TypableDef::Function(_), Namespace::Types) => TyKind::Unknown.intern(),
(TypableDef::PrimitiveType(_), Namespace::Values) => TyKind::Unknown.intern(),
(TypableDef::TypeAlias(_), Namespace::Values) => TyKind::Unknown.intern(),
}
}
/// Build the declared type of a static.
fn type_for_primitive(def: PrimitiveType) -> Ty {
match def {
PrimitiveType::Float(f) => TyKind::Float(f.into()),
PrimitiveType::Int(i) => TyKind::Int(i.into()),
PrimitiveType::Bool => TyKind::Bool,
}
.intern()
}
/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(_db: &dyn HirDatabase, def: Function) -> Ty {
TyKind::FnDef(def.into(), Substitution::empty()).intern()
}
pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> FnSig {
match def {
CallableDef::Function(f) => fn_sig_for_fn(db, f),
CallableDef::Struct(s) => fn_sig_for_struct_constructor(db, s),
}
}
pub(crate) fn fn_sig_for_fn(db: &dyn HirDatabase, def: Function) -> FnSig {
let data = def.data(db.upcast());
let resolver = def.id.resolver(db.upcast());
let params = data
| is_struct | identifier_name |
index.ts | : message.message,
nickname: message.nickname
}
}
/**
* Import bitFlyer chat logs into Firestore.
* @param fromDate Date to import (YYYY-MM-DD), in Japan time
*/
async function importBitFlyerLogs(fromDate: string) {
console.log(`importBitFlyerLogs. fromDate=${fromDate}`)
const importDate = moment(fromDate)
// Fetch the cache file that records which messages have already been saved
const bucket = storage.bucket()
const file = bucket.file(`cache.json`)
const cache: any = await file.download().then((data: any[]) => JSON.parse(data[0]))
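// Assumed shape of cache.json, inferred from how it is read and written below:
// { "messageIds": { "YYYY-MM-DD": { "<messageDocId>": { "saved": true } } } }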
// Initialize the cache data structure if it does not exist yet
cache['messageIds'] = cache['messageIds'] || {}
const cacheMessageIds = cache['messageIds'][fromDate] = cache['messageIds'][fromDate] || {}
// Fetch the chat logs from the bitFlyer API.
const fetchDate = importDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
// Store the data in Firestore.
// A WriteBatch can only process 500 operations at a time, so the writes are split into batches of 500 and committed together at the end.
const messagesColRef = firestore.collection('messages')
const batches: Array<WriteBatch> = []
let processCount = 0
const perProcess = 500 // Maximum number of operations a WriteBatch can handle
for (const message of messages) {
processCount++
const batchIdx = Math.floor(processCount / perProcess)
console.log(`batchIdx=${batchIdx}`)
if (!batches[batchIdx]) {
batches[batchIdx] = firestore.batch()
}
const batch = batches[batchIdx]
const messageDocId = resolveMessageId(message)
        // Check the cache and skip messages that have already been saved.
cacheMessageIds[messageDocId] = cacheMessageIds[messageDocId] || {}
if (cacheMessageIds[messageDocId].saved) {
console.log(`Already saved messageDocId=${messageDocId}`)
} else {
console.log(`Save messageDocId=${messageDocId}`)
const messageDocRef = messagesColRef.doc(messageDocId)
const messageDocData = convertMessageToDocData(message)
batch.set(messageDocRef, messageDocData)
cacheMessageIds[messageDocId].saved = true
}
}
console.log(`batches.length=${batches.length}`)
for (const batch of batches) {
await batch.commit()
}
    //TODO Remove entries for old dates from the messageIds cache.
await file.save(JSON.stringify(cache))
return messages.length
}
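/**
 * Minimal sketch (hypothetical helper, not used above): the same 500-operation
 * WriteBatch limit expressed as a generic chunking function.
 */
function chunkForWriteBatch<T>(items: T[], perBatch: number = 500): T[][] {
  const chunks: T[][] = []
  for (let i = 0; i < items.length; i += perBatch) {
    chunks.push(items.slice(i, i + perBatch))
  }
  return chunks
}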
/**
 * Archive bitFlyer logs to Storage.
 * @param fromDate Date in YYYY-MM-DD, Japan time
*/
async function archiveBitFlyerLogs(fromDate: string) {
console.log(`archiveBitFlyerLogs. fromDate=${fromDate}`)
const archiveDate = moment(fromDate)
    // Fetch chat logs from bitFlyer.
const fetchDate = archiveDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
const archiveDateStr = archiveDate.format('YYYY-MM-DD')
const metadata: ArchiveMetadata = {
        files: new Array<string>(), // list of files
messageNum: 0,
        hours: {}, // file lists grouped by hour
}
let archiveMessages = new Array<BitFlyerChatMessage>()
let currentIdx = 0
let currentHour = '00'
let messageCount = 0
const perFileMessage = 1000
const fileIndexes: any = {}
for (const message of messages) {
if (!message.date.match(/Z$/)) {
            // Convert the date format to ISO 8601
message.date = `${message.date}Z`
}
        // archiveDateStr is Japan time, so convert the message timestamp to Asia/Tokyo
const date = moment(message.date).tz('Asia/Tokyo')
if (archiveDateStr !== date.format('YYYY-MM-DD')) {
continue
}
const fileHour = date.format("HH")
const fileIdx = Math.floor(messageCount / perFileMessage)
if (currentHour !== fileHour) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentHour = fileHour
currentIdx = 0
archiveMessages = []
} else if (currentIdx !== fileIdx) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentIdx = fileIdx
archiveMessages = []
}
archiveMessages.push(message)
messageCount++
}
if (archiveMessages.length > 0) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedInfo.messageNum
}
const savedMetadata = await saveArchiveMetadata(archiveDate, metadata)
return savedMetadata
}
/**
 * Save an archive to Storage.
* @param date
* @param hour
* @param idx
* @param messages
*/
async function saveArchiveMessages(date: Moment, hour: string, idx: number, messages: Array<BitFlyerChatMessage>) {
const filename = `messages.h${hour}.${idx}.json`
const messageNum = messages.length
console.log(`saveArchiveMessages. filename=${filename}, messagesNum=${messageNum}`)
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(messages), {
gzip: true,
contentType: 'application/json',
})
return {
filename: filename,
messageNum: messageNum,
}
}
/**
 * Save archive metadata to Storage.
* @param date
* @param metadata
*/
async function saveArchiveMetadata(date: Moment, metadata: ArchiveMetadata) {
const archiveMetadata: any = {
fi | ()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(archiveMetadata), {
gzip: true,
contentType: 'application/json',
})
return archiveMetadata
}
/**
 * Scheduler that periodically imports chat logs
*/
export const scheduledImportLogs = functions
.runWith({
memory: '512MB'
})
.pubsub
.schedule('every 1 mins')
.onRun(async _ => {
const fromDate = moment().tz('Asia/T | les: [],
message_num: 0,
hours: {},
}
for (const hour in metadata.hours) {
if (metadata.hours.hasOwnProperty(hour)) {
const v = metadata.hours[hour]
archiveMetadata.files = archiveMetadata.files.concat(v.files)
archiveMetadata.message_num += v.messageNum
archiveMetadata.hours[hour] = {
files: v.files,
message_num: v.messageNum,
}
}
}
const filename = `metadata.json`
const bucket = storage.bucket | identifier_body |
index.ts | message: message.message,
nickname: message.nickname
}
}
/**
 * Import bitFlyer chat logs.
 * @param fromDate Date to import (YYYY-MM-DD), Japan time
*/
async function importBitFlyerLogs(fromDate: string) {
console.log(`importBitFlyerLogs. fromDate=${fromDate}`)
const importDate = moment(fromDate)
    // Fetch the cache file that records which messages have already been saved
const bucket = storage.bucket()
const file = bucket.file(`cache.json`)
const cache: any = await file.download().then((data: any[]) => JSON.parse(data[0]))
    // Initialize the cache structure if it does not exist yet
cache['messageIds'] = cache['messageIds'] || {}
const cacheMessageIds = cache['messageIds'][fromDate] = cache['messageIds'][fromDate] || {}
    // Fetch chat logs from bitFlyer.
const fetchDate = importDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
    // Store the data in Firestore.
    // A WriteBatch can only process 500 operations, so fill batches in chunks of 500 and commit them all at the end.
const messagesColRef = firestore.collection('messages')
const batches: Array<WriteBatch> = []
let processCount = 0
    const perProcess = 500 // maximum number of operations per WriteBatch
for (const message of messages) {
processCount++
const batchIdx = Math.floor(processCount / perProcess)
console.log(`batchIdx=${batchIdx}`)
if (!batches[batchIdx]) {
batches[batchIdx] = firestore.batch()
}
const batch = batches[batchIdx]
const messageDocId = resolveMessageId(message)
        // Check the cache and skip messages that have already been saved.
cacheMessageIds[messageDocId] = cacheMessageIds[messageDocId] || {}
if (cacheMessageIds[messageDocId].saved) {
console.log(`Already saved messageDocId=${messageDocId}`)
} else {
console.log(`Save messageDocId=${messageDocId}`)
const messageDocRef = messagesColRef.doc(messageDocId)
const messageDocData = convertMessageToDocData(message)
batch.set(messageDocRef, messageDocData)
cacheMessageIds[messageDocId].saved = true
}
}
console.log(`batches.length=${batches.length}`)
for (const batch of batches) {
await batch.commit()
}
    //TODO Remove entries for old dates from the messageIds cache.
| moment(fromDate)
    // Fetch chat logs from bitFlyer.
const fetchDate = archiveDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
const archiveDateStr = archiveDate.format('YYYY-MM-DD')
const metadata: ArchiveMetadata = {
        files: new Array<string>(), // list of files
messageNum: 0,
        hours: {}, // file lists grouped by hour
}
let archiveMessages = new Array<BitFlyerChatMessage>()
let currentIdx = 0
let currentHour = '00'
let messageCount = 0
const perFileMessage = 1000
const fileIndexes: any = {}
for (const message of messages) {
if (!message.date.match(/Z$/)) {
            // Convert the date format to ISO 8601
message.date = `${message.date}Z`
}
        // archiveDateStr is Japan time, so convert the message timestamp to Asia/Tokyo
const date = moment(message.date).tz('Asia/Tokyo')
if (archiveDateStr !== date.format('YYYY-MM-DD')) {
continue
}
const fileHour = date.format("HH")
const fileIdx = Math.floor(messageCount / perFileMessage)
if (currentHour !== fileHour) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentHour = fileHour
currentIdx = 0
archiveMessages = []
} else if (currentIdx !== fileIdx) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentIdx = fileIdx
archiveMessages = []
}
archiveMessages.push(message)
messageCount++
}
if (archiveMessages.length > 0) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedInfo.messageNum
}
const savedMetadata = await saveArchiveMetadata(archiveDate, metadata)
return savedMetadata
}
/**
 * Save an archive to Storage.
* @param date
* @param hour
* @param idx
* @param messages
*/
async function saveArchiveMessages(date: Moment, hour: string, idx: number, messages: Array<BitFlyerChatMessage>) {
const filename = `messages.h${hour}.${idx}.json`
const messageNum = messages.length
console.log(`saveArchiveMessages. filename=${filename}, messagesNum=${messageNum}`)
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(messages), {
gzip: true,
contentType: 'application/json',
})
return {
filename: filename,
messageNum: messageNum,
}
}
/**
 * Save archive metadata to Storage.
* @param date
* @param metadata
*/
async function saveArchiveMetadata(date: Moment, metadata: ArchiveMetadata) {
const archiveMetadata: any = {
files: [],
message_num: 0,
hours: {},
}
for (const hour in metadata.hours) {
if (metadata.hours.hasOwnProperty(hour)) {
const v = metadata.hours[hour]
archiveMetadata.files = archiveMetadata.files.concat(v.files)
archiveMetadata.message_num += v.messageNum
archiveMetadata.hours[hour] = {
files: v.files,
message_num: v.messageNum,
}
}
}
const filename = `metadata.json`
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(archiveMetadata), {
gzip: true,
contentType: 'application/json',
})
return archiveMetadata
}
/**
 * Scheduler that periodically imports chat logs
*/
export const scheduledImportLogs = functions
.runWith({
memory: '512MB'
})
.pubsub
.schedule('every 1 mins')
.onRun(async _ => {
const fromDate = moment().tz('Asia/T | await file.save(JSON.stringify(cache))
return messages.length
}
/**
 * Archive bitFlyer logs to Storage.
 * @param fromDate Date in YYYY-MM-DD, Japan time
*/
async function archiveBitFlyerLogs(fromDate: string) {
console.log(`archiveBitFlyerLogs. fromDate=${fromDate}`)
const archiveDate = | conditional_block |
index.ts | : message.message,
nickname: message.nickname
}
}
/**
 * Import bitFlyer chat logs.
 * @param fromDate Date to import (YYYY-MM-DD), Japan time
*/
async function importBitFlyerLogs(fromDate: string) {
console.log(`importBitFlyerLogs. fromDate=${fromDate}`)
const importDate = moment(fromDate)
    // Fetch the cache file that records which messages have already been saved
const bucket = storage.bucket()
const file = bucket.file(`cache.json`)
const cache: any = await file.download().then((data: any[]) => JSON.parse(data[0]))
    // Initialize the cache structure if it does not exist yet
cache['messageIds'] = cache['messageIds'] || {}
const cacheMessageIds = cache['messageIds'][fromDate] = cache['messageIds'][fromDate] || {}
    // Fetch chat logs from bitFlyer.
const fetchDate = importDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
    // Store the data in Firestore.
    // A WriteBatch can only process 500 operations, so fill batches in chunks of 500 and commit them all at the end.
const messagesColRef = firestore.collection('messages')
const batches: Array<WriteBatch> = []
let processCount = 0
    const perProcess = 500 // maximum number of operations per WriteBatch
for (const message of messages) {
processCount++
const batchIdx = Math.floor(processCount / perProcess)
console.log(`batchIdx=${batchIdx}`)
if (!batches[batchIdx]) {
batches[batchIdx] = firestore.batch()
}
const batch = batches[batchIdx]
const messageDocId = resolveMessageId(message)
        // Check the cache and skip messages that have already been saved.
cacheMessageIds[messageDocId] = cacheMessageIds[messageDocId] || {}
if (cacheMessageIds[messageDocId].saved) {
console.log(`Already saved messageDocId=${messageDocId}`)
} else {
console.log(`Save messageDocId=${messageDocId}`)
const messageDocRef = messagesColRef.doc(messageDocId)
const messageDocData = convertMessageToDocData(message)
batch.set(messageDocRef, messageDocData)
cacheMessageIds[messageDocId].saved = true
}
}
console.log(`batches.length=${batches.length}`)
for (const batch of batches) {
await batch.commit()
}
    //TODO Remove entries for old dates from the messageIds cache.
await file.save(JSON.stringify(cache))
return messages.length
}
/**
 * Archive bitFlyer logs to Storage.
 * @param fromDate Date in YYYY-MM-DD, Japan time
*/
async function archiveBitFlyerLogs(fromDate: string) {
console.log(`archiveBitFlyerLogs. fromDate=${fromDate}`)
const archiveDate = moment(fromDate)
    // Fetch chat logs from bitFlyer.
const fetchDate = archiveDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
const archiveDateStr = archiveDate.format('YYYY-MM-DD')
const metadata: ArchiveMetadata = {
        files: new Array<string>(), // list of files
messageNum: 0,
        hours: {}, // file lists grouped by hour
}
let archiveMessages = new Array<BitFlyerChatMessage>()
let currentIdx = 0
let currentHour = '00'
let messageCount = 0
const perFileMessage = 1000
const fileIndexes: any = {}
for (const message of messages) {
if (!message.date.match(/Z$/)) {
            // Convert the date format to ISO 8601
message.date = `${message.date}Z`
}
        // archiveDateStr is Japan time, so convert the message timestamp to Asia/Tokyo
const date = moment(message.date).tz('Asia/Tokyo')
if (archiveDateStr !== date.format('YYYY-MM-DD')) {
continue
}
const fileHour = date.format("HH")
const fileIdx = Math.floor(messageCount / perFileMessage)
if (currentHour !== fileHour) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentHour = fileHour
currentIdx = 0
archiveMessages = []
} else if (currentIdx !== fileIdx) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentIdx = fileIdx
archiveMessages = []
}
archiveMessages.push(message)
messageCount++
}
if (archiveMessages.length > 0) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedInfo.messageNum
}
const savedMetadata = await saveArchiveMetadata(archiveDate, metadata)
return savedMetadata
}
/**
 * Save an archive to Storage.
* @param date
* @param hour
* @param idx
* @param messages
*/
async function saveArchiveMessages(date: Moment, hour: string, idx: number, messages: Array<BitFlyerChatMessage>) {
const filename = `messages.h${hour}.${idx}.json`
const messageNum = messages.length
console.log(`saveArchiveMessages. filename=${filename}, messagesNum=${messageNum}`)
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(messages), {
gzip: true,
contentType: 'application/json',
})
return {
filename: filename,
messageNum: messageNum,
}
}
/**
 * Save archive metadata to Storage.
* @param date
* @param metadata
*/
async function sav | te: Moment, metadata: ArchiveMetadata) {
const archiveMetadata: any = {
files: [],
message_num: 0,
hours: {},
}
for (const hour in metadata.hours) {
if (metadata.hours.hasOwnProperty(hour)) {
const v = metadata.hours[hour]
archiveMetadata.files = archiveMetadata.files.concat(v.files)
archiveMetadata.message_num += v.messageNum
archiveMetadata.hours[hour] = {
files: v.files,
message_num: v.messageNum,
}
}
}
const filename = `metadata.json`
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(archiveMetadata), {
gzip: true,
contentType: 'application/json',
})
return archiveMetadata
}
/**
 * Scheduler that periodically imports chat logs
*/
export const scheduledImportLogs = functions
.runWith({
memory: '512MB'
})
.pubsub
.schedule('every 1 mins')
.onRun(async _ => {
const fromDate = moment().tz('Asia | eArchiveMetadata(da | identifier_name |
index.ts | 0]))
    // Initialize the cache structure if it does not exist yet
cache['messageIds'] = cache['messageIds'] || {}
const cacheMessageIds = cache['messageIds'][fromDate] = cache['messageIds'][fromDate] || {}
    // Fetch chat logs from bitFlyer.
const fetchDate = importDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
    // Store the data in Firestore.
    // A WriteBatch can only process 500 operations, so fill batches in chunks of 500 and commit them all at the end.
const messagesColRef = firestore.collection('messages')
const batches: Array<WriteBatch> = []
let processCount = 0
    const perProcess = 500 // maximum number of operations per WriteBatch
for (const message of messages) {
processCount++
const batchIdx = Math.floor(processCount / perProcess)
console.log(`batchIdx=${batchIdx}`)
if (!batches[batchIdx]) {
batches[batchIdx] = firestore.batch()
}
const batch = batches[batchIdx]
const messageDocId = resolveMessageId(message)
        // Check the cache and skip messages that have already been saved.
cacheMessageIds[messageDocId] = cacheMessageIds[messageDocId] || {}
if (cacheMessageIds[messageDocId].saved) {
console.log(`Already saved messageDocId=${messageDocId}`)
} else {
console.log(`Save messageDocId=${messageDocId}`)
const messageDocRef = messagesColRef.doc(messageDocId)
const messageDocData = convertMessageToDocData(message)
batch.set(messageDocRef, messageDocData)
cacheMessageIds[messageDocId].saved = true
}
}
console.log(`batches.length=${batches.length}`)
for (const batch of batches) {
await batch.commit()
}
    //TODO Remove entries for old dates from the messageIds cache.
await file.save(JSON.stringify(cache))
return messages.length
}
/**
 * Archive bitFlyer logs to Storage.
 * @param fromDate Date in YYYY-MM-DD, Japan time
*/
async function archiveBitFlyerLogs(fromDate: string) {
console.log(`archiveBitFlyerLogs. fromDate=${fromDate}`)
const archiveDate = moment(fromDate)
    // Fetch chat logs from bitFlyer.
const fetchDate = archiveDate.clone().add(-1, 'days').tz('Asia/Tokyo').format('YYYY-MM-DD')
const bitFlyerApiEndpoint = `https://api.bitflyer.com/v1/getchats`
const messages: Array<BitFlyerChatMessage> = await fetch(`${bitFlyerApiEndpoint}?from_date=${fetchDate}`).then(res => res.json())
console.log(`messages.length=${messages.length}`)
const archiveDateStr = archiveDate.format('YYYY-MM-DD')
const metadata: ArchiveMetadata = {
        files: new Array<string>(), // list of files
messageNum: 0,
        hours: {}, // file lists grouped by hour
}
let archiveMessages = new Array<BitFlyerChatMessage>()
let currentIdx = 0
let currentHour = '00'
let messageCount = 0
const perFileMessage = 1000
const fileIndexes: any = {}
for (const message of messages) {
if (!message.date.match(/Z$/)) {
            // Convert the date format to ISO 8601
message.date = `${message.date}Z`
}
        // archiveDateStr is Japan time, so convert the message timestamp to Asia/Tokyo
const date = moment(message.date).tz('Asia/Tokyo')
if (archiveDateStr !== date.format('YYYY-MM-DD')) {
continue
}
const fileHour = date.format("HH")
const fileIdx = Math.floor(messageCount / perFileMessage)
if (currentHour !== fileHour) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentHour = fileHour
currentIdx = 0
archiveMessages = []
} else if (currentIdx !== fileIdx) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedArchiveMessageInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedArchiveMessageInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedArchiveMessageInfo.messageNum
currentIdx = fileIdx
archiveMessages = []
}
archiveMessages.push(message)
messageCount++
}
if (archiveMessages.length > 0) {
fileIndexes[currentHour] = fileIndexes[currentHour] === undefined ? -1 : fileIndexes[currentHour]
fileIndexes[currentHour] += 1
const savedInfo = await saveArchiveMessages(archiveDate, currentHour, fileIndexes[currentHour], archiveMessages)
metadata.hours[`h${currentHour}`] = metadata.hours[`h${currentHour}`] || { files: [], messageNum: 0 }
metadata.hours[`h${currentHour}`].files.push(savedInfo.filename)
metadata.hours[`h${currentHour}`].messageNum += savedInfo.messageNum
}
const savedMetadata = await saveArchiveMetadata(archiveDate, metadata)
return savedMetadata
}
/**
 * Save an archive to Storage.
* @param date
* @param hour
* @param idx
* @param messages
*/
async function saveArchiveMessages(date: Moment, hour: string, idx: number, messages: Array<BitFlyerChatMessage>) {
const filename = `messages.h${hour}.${idx}.json`
const messageNum = messages.length
console.log(`saveArchiveMessages. filename=${filename}, messagesNum=${messageNum}`)
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(messages), {
gzip: true,
contentType: 'application/json',
})
return {
filename: filename,
messageNum: messageNum,
}
}
/**
* Storageにアーカイブのメタデータを保存する。
* @param date
* @param metadata
*/
async function saveArchiveMetadata(date: Moment, metadata: ArchiveMetadata) {
const archiveMetadata: any = {
files: [],
message_num: 0,
hours: {},
}
for (const hour in metadata.hours) {
if (metadata.hours.hasOwnProperty(hour)) {
const v = metadata.hours[hour]
archiveMetadata.files = archiveMetadata.files.concat(v.files)
archiveMetadata.message_num += v.messageNum
archiveMetadata.hours[hour] = {
files: v.files,
message_num: v.messageNum,
}
}
}
const filename = `metadata.json`
const bucket = storage.bucket()
const file = bucket.file(`/public/archives/${date.format('YYYY/MM/DD')}/${filename}`)
await file.save(JSON.stringify(archiveMetadata), {
gzip: true,
contentType: 'application/json',
})
return archiveMetadata
}
/**
 * Scheduler that periodically imports chat logs
*/
export const scheduledImportLogs = functions
.runWith({
memory: '512MB'
})
.pubsub
.schedule('every 1 mins')
.onRun(async _ => {
const fromDate = moment().tz('Asia/Tokyo').format('YYYY-MM-DD')
const concurrency = 1
const promisePool = new PromisePool(() => importBitFlyerLogs(fromDate), concurrency)
await promisePool.start();
console.log(`Imported messages by schedule. fromDate=${fromDate}`)
});
/**
 * Cloud Function that imports chat logs
*/
//export const importLogs = functions.https.onRequest(async (request, response) => {
// if (request.method !== 'POST') {
// response.status(400).send(`Please use POST method.`)
// return | // }
//
// let fromDate = request.query.from_date || '' | random_line_split |
|
goendpoints.go | volatile ("MRC p15, 0, %0, c9, c13, 0\t\n": "=r"(value));
// return value;
// }
//
// void init_perfcounters (int32_t do_reset, int32_t enable_divider)
// {
// // in general enable all counters (including cycle counter)
// int32_t value = 1;
//
    // // perform reset:
// if (do_reset)
// {
// value |= 2; // reset all counters to zero.
// value |= 4; // reset cycle counter to zero.
// }
//
// if (enable_divider)
// value |= 8; // enable "by 64" divider for CCNT.
//
// value |= 16;
//
// // program the performance-counter control-register:
// asm volatile ("MCR p15, 0, %0, c9, c12, 0\t\n" :: "r"(value));
//
// // enable all counters:
// asm volatile ("MCR p15, 0, %0, c9, c12, 1\t\n" :: "r"(0x8000000f));
//
// // clear overflows:
// asm volatile ("MCR p15, 0, %0, c9, c12, 3\t\n" :: "r"(0x8000000f));
// }
import "C"
import "fmt"
import "os"
import "syscall"
import "unsafe"
import "time"
import "strconv"
import "strings"
import "sort"
import "log"
var timeStampLayout = "02:Jan:2006:15:04:05.000000"
var samples = 100
func openPort(name string) (f *os.File, err error) {
f, err = os.OpenFile(name, syscall.O_RDWR|syscall.O_NOCTTY, 0666)
if err != nil {
return nil, err
}
defer func() {
if err != nil && f != nil {
f.Close()
}
}()
fd := f.Fd()
    // Set serial port 'name' to 115200/8/N/1 in RAW mode (i.e. no pre-processing of received data)
    // and pay special attention to the Cc field: it tells the serial port not to return until
    // at least syscall.VMIN bytes have been read. This is a tunable parameter that may help in Lab 3
t := syscall.Termios{
Iflag: syscall.IGNPAR,
Cflag: syscall.CS8 | syscall.CREAD | syscall.CLOCAL | syscall.B115200,
Cc: [32]uint8{syscall.VMIN: 27},
Ispeed: syscall.B115200,
Ospeed: syscall.B115200,
}
// Syscall to apply these parameters
_, _, errno := syscall.Syscall6(
syscall.SYS_IOCTL,
uintptr(fd),
uintptr(syscall.TCSETS),
uintptr(unsafe.Pointer(&t)),
0,
0,
0,
)
if errno != 0 {
return nil, errno
}
return f, nil
}
type int64arr []int64
func (a int64arr) Len() int { return len(a) }
func (a int64arr) Swap(i, j int){ a[i], a[j] = a[j], a[i] }
func (a int64arr) Less(i, j int) bool { return a[i] < a[j] }
func generateTimestamp() string {
t := time.Now()
return t.Format(timeStampLayout)
}
func parseTimestamp(timestamp string) time.Time {
t, e := time.Parse(timeStampLayout, timestamp)
if e != nil {
fmt.Printf("Parse error occured: %v\n", e)
}
return t
}
func calculateDelayRTT(p0 string, p1 string, p2 string, p3 string) int64 {
// parse time stamp string
t0 := parseTimestamp(p0)
t1 := parseTimestamp(p1)
t2 := parseTimestamp(p2)
t3 := parseTimestamp(p3)
delayRTT := (t3.Sub(t0) + t2.Sub(t1))
//fmt.Printf("RTT delay: %v\n", delayRTT)
return int64(delayRTT.Nanoseconds())
}
func exchangeTimestamps() int64 {
// client = coordinator, server = endpoints. We are server
// t0 is the client's timestamp of the request packet transmission,
// t1 is the server's timestamp of the request packet reception,
t0 := C.GoString(C.readMsg())
t1 := generateTimestamp()
// t2 is the server's timestamp of the response packet transmission and
// t3 is the client's timestamp of the response packet reception.
t2 := generateTimestamp()
C.writeMsg(C.CString(t2))
t3 := C.GoString(C.readMsg())
// gotta send t1
C.writeMsg(C.CString(t1))
//calculateClockOffset(t0, t1, t2, t3)
return calculateDelayRTT(t0, t1, t2, t3)
}
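// Sketch of the clock-offset helper referenced (but commented out) in
// exchangeTimestamps above. It assumes the standard NTP formula
// offset = ((t1 - t0) + (t2 - t3)) / 2; the name and signature are illustrative.
func calculateClockOffset(p0 string, p1 string, p2 string, p3 string) int64 {
    t0 := parseTimestamp(p0) // client send
    t1 := parseTimestamp(p1) // server receive
    t2 := parseTimestamp(p2) // server send
    t3 := parseTimestamp(p3) // client receive
    offset := (t1.Sub(t0) + t2.Sub(t3)) / 2
    return offset.Nanoseconds()
}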
func readMessage(f *os.File) string {
buffer := make([]byte, 27)
_, err := f.Read(buffer)
if err != nil {
fmt.Printf("Read error occured: %v\n", err)
}
//fmt.Printf("Received %d bytes: %s\n", count, string(buffer))
return string(buffer)
}
func writeMessage(f *os.File, str string) {
buffer := []byte(str)
_, err := f.Write(buffer)
if err != nil {
fmt.Printf("Write error occured: %v\n", err)
}
//fmt.Printf("Sent %d bytes: %s\n", count, str)
}
func waitTimer(cycles int64, f *os.File) int {
// init counters:
C.init_perfcounters(1, 0)
//fmt.Printf("cyles to wait: %d\n", cycles)
timeStart := C.ulonglong(C.get_cyclecount())
timeElapsed := C.ulonglong(0)
for {
timeElapsed = C.ulonglong(C.get_cyclecount()) - timeStart
if timeElapsed > C.ulonglong(cycles) {
writeMessage(f, fmt.Sprintf("%27s", "COMPLETE"))
break
}
}
return int(timeElapsed)
}
func priority_test(f *os.File, delay int64) {
rtt := delay
str := readMessage(f)
cycles, err := strconv.ParseInt(strings.TrimSpace(str), 0, 64)
if err != nil {
fmt.Println(err)
}
fmt.Printf("readMessage string: %s\n", str)
fmt.Printf("rtt: %d\n", rtt)
fmt.Printf("cycles: %d\n", cycles)
waitTimer(cycles-rtt-rtt, f)
//writeMessage(f, fmt.Sprintf("%27s", "COMPLETE"))
}
func calculateDelayJitter() (aDelay int64, aJitter int64) {
samples := 100
delayEntries := make([]int64, samples)
// Delay/Jitter Calculations
//totalDelay := uint64(0)
for i := 0; i < samples; i++ {
delayEntries[i] = exchangeTimestamps()
log.Printf("%v\n", i)
//totalDelay += delayEntries[i]
}
//avgdelay := totalDelay / uint64(samples)
sort.Sort(int64arr(delayEntries))
avgdelay := (delayEntries[49] + delayEntries[50])/2
totaljitter := int64(0)
jitter := int64(0)
extremes := 0
jitterEntries := make([]int64, samples)
for i := 0; i < samples; i++ {
if delayEntries[i] > 300000000 {
extremes += 1
continue
}
if delayEntries[i] > avgdelay {
jitter = delayEntries[i] - avgdelay
} else {
jitter = avgdelay - delayEntries[i]
}
jitterEntries[i] = jitter;
//fmt.Printf("Cur Jitter = %d \n", jitter)
totaljitter += jitter
}
sort.Sort(int64arr(jitterEntries))
samples -= extremes
avgjitter := totaljitter / int64(samples)
fmt.Printf("----------------------Delay Entries----------------------------------\n");
for i := 0; i < samples; i++ {
fmt.Printf("%v\n", delayEntries[i]);
}
fmt.Printf("----------------------Jitter Entries---------------------------------\n");
for i := 0; i < samples; i++ {
fmt.Printf("%v\n", jitterEntries[i]);
}
fmt.Printf("----------------------Final Statistics--------------------------------\n");
fmt.Printf("median delay = %v \n", avgdelay);
fmt.Printf("average jitter = %v \n", avgjitter);
fmt.Printf("Num outliers = %v \n", extremes)
return int64(avgdelay), int64(avgjitter)
}
func | main | identifier_name |
|
goendpoints.go | _GetDeviceInfo succeeded. Device is type %d.\n",
// (int)ftDevice);
//
// /* MUST set Signature1 and 2 before calling FT_EE_Read */
// Data.Signature1 = 0x00000000;
// Data.Signature2 = 0xffffffff;
// Data.Manufacturer = (char *)malloc(256); /* E.g "FTDI" */
// Data.ManufacturerId = (char *)malloc(256); /* E.g. "FT" */
// Data.Description = (char *)malloc(256); /* E.g. "USB HS Serial Converter" */
// Data.SerialNumber = (char *)malloc(256); /* E.g. "FT000001" if fixed, or NULL */
// if (Data.Manufacturer == NULL ||
// Data.ManufacturerId == NULL ||
// Data.Description == NULL ||
// Data.SerialNumber == NULL)
// {
// printf("Failed to allocate memory.\n");
// retCode = 1;
// goto exit;
// }
//
// ftStatus = FT_EE_Read(ftHandle0, &Data);
// if(ftStatus != FT_OK) {
// printf("FT_EE_Read failed\n");
// retCode = 1;
// goto exit;
// }
//
// printf("FT_EE_Read succeeded.\n\n");
//
// printf("Signature1 = %d\n", (int)Data.Signature1);
// printf("Signature2 = %d\n", (int)Data.Signature2);
// printf("Version = %d\n", (int)Data.Version);
//
// printf("VendorId = 0x%04X\n", Data.VendorId);
// printf("ProductId = 0x%04X\n", Data.ProductId);
// printf("Manufacturer = %s\n", Data.Manufacturer);
// printf("ManufacturerId = %s\n", Data.ManufacturerId);
// printf("Description = %s\n", Data.Description);
// printf("SerialNumber = %s\n", Data.SerialNumber);
// printf("MaxPower = %d\n", Data.MaxPower);
// printf("PnP = %d\n", Data.PnP) ;
// printf("SelfPowered = %d\n", Data.SelfPowered);
// printf("RemoteWakeup = %d\n", Data.RemoteWakeup);
// if (ftDevice == FT_DEVICE_232R)
// {
// /* Rev 6 (FT232R) extensions */
// printf("232R:\n");
// printf("-----\n");
// printf("\tUseExtOsc = 0x%X\n", Data.UseExtOsc); // Use External Oscillator
// printf("\tHighDriveIOs = 0x%X\n", Data.HighDriveIOs); // High Drive I/Os
// printf("\tEndpointSize = 0x%X\n", Data.EndpointSize); // Endpoint size
//
// printf("\tPullDownEnableR = 0x%X\n", Data.PullDownEnableR); // non-zero if pull down enabled
// printf("\tSerNumEnableR = 0x%X\n", Data.SerNumEnableR); // non-zero if serial number to be used
//
// printf("\tInvertTXD = 0x%X\n", Data.InvertTXD); // non-zero if invert TXD
// printf("\tInvertRXD = 0x%X\n", Data.InvertRXD); // non-zero if invert RXD
// printf("\tInvertRTS = 0x%X\n", Data.InvertRTS); // non-zero if invert RTS
// printf("\tInvertCTS = 0x%X\n", Data.InvertCTS); // non-zero if invert CTS
// printf("\tInvertDTR = 0x%X\n", Data.InvertDTR); // non-zero if invert DTR
// printf("\tInvertDSR = 0x%X\n", Data.InvertDSR); // non-zero if invert DSR
// printf("\tInvertDCD = 0x%X\n", Data.InvertDCD); // non-zero if invert DCD
// printf("\tInvertRI = 0x%X\n", Data.InvertRI); // non-zero if invert RI
//
// printf("\tCbus0 = 0x%X\n", Data.Cbus0); // Cbus Mux control
// printf("\tCbus1 = 0x%X\n", Data.Cbus1); // Cbus Mux control
// printf("\tCbus2 = 0x%X\n", Data.Cbus2); // Cbus Mux control
// printf("\tCbus3 = 0x%X\n", Data.Cbus3); // Cbus Mux control
// printf("\tCbus4 = 0x%X\n", Data.Cbus4); // Cbus Mux control
//
// printf("\tRIsD2XX = 0x%X\n", Data.RIsD2XX); // non-zero if using D2XX
// }
// ftStatus = FT_SetBaudRate(ftHandle0, 115200); // Set baud rate to 115200
// if (ftStatus == FT_OK) {
// printf("FT_SetBaudRate OK\n");
// }
// else {
// printf("FT_SetBaudRate Failed\n");
// }
// ftStatus = FT_SetFlowControl(ftHandle0, FT_FLOW_RTS_CTS, 0x11, 0x13);
// if (ftStatus == FT_OK) {
// printf("FT_SetFlowControl OK\n");
// }
// else {
// printf("FT_SetFlowControl Failed\n");
// }
// UCHAR LatencyTimer = 1;
// ftStatus = FT_SetLatencyTimer(ftHandle0, LatencyTimer );
// if (ftStatus == FT_OK) {
// printf("Set LatencyTimer: %u\n", LatencyTimer );
// }
// else {
// printf("FT_SetLatencyTimer failed\n");
// retCode = 1;
// goto exit;
// }
// DWORD TransferSize = 64;
// ftStatus = FT_SetUSBParameters(ftHandle0, TransferSize, TransferSize);
// if (ftStatus == FT_OK) {
// printf("In/Out transfer size set to 64 bytes\n");
// }
// else {
// printf("FT_SetUSBParameters failed\n");
// retCode = 1;
// goto exit;
// }
// FT_SetDataCharacteristics(ftHandle0, FT_BITS_8, FT_STOP_BITS_1, FT_PARITY_NONE);
//
//
//exit:
// free(Data.Manufacturer);
// free(Data.ManufacturerId);
// free(Data.Description);
// free(Data.SerialNumber);
// printf("Returning %d\n", retCode);
// return retCode;
//}
//
// unsigned int get_cyclecount (void)
// {
// unsigned int value;
// // Read CCNT Register
// asm volatile ("MRC p15, 0, %0, c9, c13, 0\t\n": "=r"(value));
// return value;
// }
//
// void init_perfcounters (int32_t do_reset, int32_t enable_divider)
// {
// // in general enable all counters (including cycle counter)
// int32_t value = 1;
//
    // // perform reset:
// if (do_reset)
// {
// value |= 2; // reset all counters to zero.
// value |= 4; // reset cycle counter to zero.
// }
//
// if (enable_divider)
// value |= 8; // enable "by 64" divider for CCNT.
//
// value |= 16;
//
// // program the performance-counter control-register:
// asm volatile ("MCR p15, 0, %0, c9, c12, 0\t\n" :: "r"(value));
//
// // enable all counters:
// asm volatile ("MCR p15, 0, %0, c9, c12, 1\t\n" :: "r"(0x8000000f));
//
// // clear overflows:
// asm volatile ("MCR p15, 0, %0, c9, c12, 3\t\n" :: "r"(0x8000000f));
// }
import "C"
import "fmt"
import "os"
import "syscall"
import "unsafe"
import "time"
import "strconv"
import "strings"
import "sort"
import "log"
var timeStampLayout = "02:Jan:2006:15:04:05.000000"
var samples = 100
func openPort(name string) (f *os.File, err error) {
f, err = os.OpenFile(name, syscall.O_RDWR|syscall.O_NOCTTY, 0666)
if err != nil |
defer func() {
if err | {
return nil, err
} | conditional_block |
goendpoints.go | printf("MaxPower = %d\n", Data.MaxPower);
// printf("PnP = %d\n", Data.PnP) ;
// printf("SelfPowered = %d\n", Data.SelfPowered);
// printf("RemoteWakeup = %d\n", Data.RemoteWakeup);
// if (ftDevice == FT_DEVICE_232R)
// {
// /* Rev 6 (FT232R) extensions */
// printf("232R:\n");
// printf("-----\n");
// printf("\tUseExtOsc = 0x%X\n", Data.UseExtOsc); // Use External Oscillator
// printf("\tHighDriveIOs = 0x%X\n", Data.HighDriveIOs); // High Drive I/Os
// printf("\tEndpointSize = 0x%X\n", Data.EndpointSize); // Endpoint size
//
// printf("\tPullDownEnableR = 0x%X\n", Data.PullDownEnableR); // non-zero if pull down enabled
// printf("\tSerNumEnableR = 0x%X\n", Data.SerNumEnableR); // non-zero if serial number to be used
//
// printf("\tInvertTXD = 0x%X\n", Data.InvertTXD); // non-zero if invert TXD
// printf("\tInvertRXD = 0x%X\n", Data.InvertRXD); // non-zero if invert RXD
// printf("\tInvertRTS = 0x%X\n", Data.InvertRTS); // non-zero if invert RTS
// printf("\tInvertCTS = 0x%X\n", Data.InvertCTS); // non-zero if invert CTS
// printf("\tInvertDTR = 0x%X\n", Data.InvertDTR); // non-zero if invert DTR
// printf("\tInvertDSR = 0x%X\n", Data.InvertDSR); // non-zero if invert DSR
// printf("\tInvertDCD = 0x%X\n", Data.InvertDCD); // non-zero if invert DCD
// printf("\tInvertRI = 0x%X\n", Data.InvertRI); // non-zero if invert RI
//
// printf("\tCbus0 = 0x%X\n", Data.Cbus0); // Cbus Mux control
// printf("\tCbus1 = 0x%X\n", Data.Cbus1); // Cbus Mux control
// printf("\tCbus2 = 0x%X\n", Data.Cbus2); // Cbus Mux control
// printf("\tCbus3 = 0x%X\n", Data.Cbus3); // Cbus Mux control
// printf("\tCbus4 = 0x%X\n", Data.Cbus4); // Cbus Mux control
//
// printf("\tRIsD2XX = 0x%X\n", Data.RIsD2XX); // non-zero if using D2XX
// }
// ftStatus = FT_SetBaudRate(ftHandle0, 115200); // Set baud rate to 115200
// if (ftStatus == FT_OK) {
// printf("FT_SetBaudRate OK\n");
// }
// else {
// printf("FT_SetBaudRate Failed\n");
// }
// ftStatus = FT_SetFlowControl(ftHandle0, FT_FLOW_RTS_CTS, 0x11, 0x13);
// if (ftStatus == FT_OK) {
// printf("FT_SetFlowControl OK\n");
// }
// else {
// printf("FT_SetFlowControl Failed\n");
// }
// UCHAR LatencyTimer = 1;
// ftStatus = FT_SetLatencyTimer(ftHandle0, LatencyTimer );
// if (ftStatus == FT_OK) {
// printf("Set LatencyTimer: %u\n", LatencyTimer );
// }
// else {
// printf("FT_SetLatencyTimer failed\n");
// retCode = 1;
// goto exit;
// }
// DWORD TransferSize = 64;
// ftStatus = FT_SetUSBParameters(ftHandle0, TransferSize, TransferSize);
// if (ftStatus == FT_OK) {
// printf("In/Out transfer size set to 64 bytes\n");
// }
// else {
// printf("FT_SetUSBParameters failed\n");
// retCode = 1;
// goto exit;
// }
// FT_SetDataCharacteristics(ftHandle0, FT_BITS_8, FT_STOP_BITS_1, FT_PARITY_NONE);
//
//
//exit:
// free(Data.Manufacturer);
// free(Data.ManufacturerId);
// free(Data.Description);
// free(Data.SerialNumber);
// printf("Returning %d\n", retCode);
// return retCode;
//}
//
// unsigned int get_cyclecount (void)
// {
// unsigned int value;
// // Read CCNT Register
// asm volatile ("MRC p15, 0, %0, c9, c13, 0\t\n": "=r"(value));
// return value;
// }
//
// void init_perfcounters (int32_t do_reset, int32_t enable_divider)
// {
// // in general enable all counters (including cycle counter)
// int32_t value = 1;
//
    // // perform reset:
// if (do_reset)
// {
// value |= 2; // reset all counters to zero.
// value |= 4; // reset cycle counter to zero.
// }
//
// if (enable_divider)
// value |= 8; // enable "by 64" divider for CCNT.
//
// value |= 16;
//
// // program the performance-counter control-register:
// asm volatile ("MCR p15, 0, %0, c9, c12, 0\t\n" :: "r"(value));
//
// // enable all counters:
// asm volatile ("MCR p15, 0, %0, c9, c12, 1\t\n" :: "r"(0x8000000f));
//
// // clear overflows:
// asm volatile ("MCR p15, 0, %0, c9, c12, 3\t\n" :: "r"(0x8000000f));
// }
import "C"
import "fmt"
import "os"
import "syscall"
import "unsafe"
import "time"
import "strconv"
import "strings"
import "sort"
import "log"
var timeStampLayout = "02:Jan:2006:15:04:05.000000"
var samples = 100
func openPort(name string) (f *os.File, err error) {
f, err = os.OpenFile(name, syscall.O_RDWR|syscall.O_NOCTTY, 0666)
if err != nil {
return nil, err
}
defer func() {
if err != nil && f != nil {
f.Close()
}
}()
fd := f.Fd()
    // Set serial port 'name' to 115200/8/N/1 in RAW mode (i.e. no pre-processing of received data)
    // and pay special attention to the Cc field: it tells the serial port not to return until
    // at least syscall.VMIN bytes have been read. This is a tunable parameter that may help in Lab 3
t := syscall.Termios{
Iflag: syscall.IGNPAR,
Cflag: syscall.CS8 | syscall.CREAD | syscall.CLOCAL | syscall.B115200,
Cc: [32]uint8{syscall.VMIN: 27},
Ispeed: syscall.B115200,
Ospeed: syscall.B115200,
}
// Syscall to apply these parameters
_, _, errno := syscall.Syscall6(
syscall.SYS_IOCTL,
uintptr(fd),
uintptr(syscall.TCSETS),
uintptr(unsafe.Pointer(&t)),
0,
0,
0,
)
if errno != 0 {
return nil, errno
}
return f, nil
}
type int64arr []int64
func (a int64arr) Len() int { return len(a) }
func (a int64arr) Swap(i, j int){ a[i], a[j] = a[j], a[i] }
func (a int64arr) Less(i, j int) bool { return a[i] < a[j] }
func generateTimestamp() string {
t := time.Now()
return t.Format(timeStampLayout)
}
func parseTimestamp(timestamp string) time.Time {
t, e := time.Parse(timeStampLayout, timestamp)
if e != nil {
fmt.Printf("Parse error occured: %v\n", e)
}
return t
}
func calculateDelayRTT(p0 string, p1 string, p2 string, p3 string) int64 {
// parse time stamp string | random_line_split |
||
goendpoints.go | printf("ProductId = 0x%04X\n", Data.ProductId);
// printf("Manufacturer = %s\n", Data.Manufacturer);
// printf("ManufacturerId = %s\n", Data.ManufacturerId);
// printf("Description = %s\n", Data.Description);
// printf("SerialNumber = %s\n", Data.SerialNumber);
// printf("MaxPower = %d\n", Data.MaxPower);
// printf("PnP = %d\n", Data.PnP) ;
// printf("SelfPowered = %d\n", Data.SelfPowered);
// printf("RemoteWakeup = %d\n", Data.RemoteWakeup);
// if (ftDevice == FT_DEVICE_232R)
// {
// /* Rev 6 (FT232R) extensions */
// printf("232R:\n");
// printf("-----\n");
// printf("\tUseExtOsc = 0x%X\n", Data.UseExtOsc); // Use External Oscillator
// printf("\tHighDriveIOs = 0x%X\n", Data.HighDriveIOs); // High Drive I/Os
// printf("\tEndpointSize = 0x%X\n", Data.EndpointSize); // Endpoint size
//
// printf("\tPullDownEnableR = 0x%X\n", Data.PullDownEnableR); // non-zero if pull down enabled
// printf("\tSerNumEnableR = 0x%X\n", Data.SerNumEnableR); // non-zero if serial number to be used
//
// printf("\tInvertTXD = 0x%X\n", Data.InvertTXD); // non-zero if invert TXD
// printf("\tInvertRXD = 0x%X\n", Data.InvertRXD); // non-zero if invert RXD
// printf("\tInvertRTS = 0x%X\n", Data.InvertRTS); // non-zero if invert RTS
// printf("\tInvertCTS = 0x%X\n", Data.InvertCTS); // non-zero if invert CTS
// printf("\tInvertDTR = 0x%X\n", Data.InvertDTR); // non-zero if invert DTR
// printf("\tInvertDSR = 0x%X\n", Data.InvertDSR); // non-zero if invert DSR
// printf("\tInvertDCD = 0x%X\n", Data.InvertDCD); // non-zero if invert DCD
// printf("\tInvertRI = 0x%X\n", Data.InvertRI); // non-zero if invert RI
//
// printf("\tCbus0 = 0x%X\n", Data.Cbus0); // Cbus Mux control
// printf("\tCbus1 = 0x%X\n", Data.Cbus1); // Cbus Mux control
// printf("\tCbus2 = 0x%X\n", Data.Cbus2); // Cbus Mux control
// printf("\tCbus3 = 0x%X\n", Data.Cbus3); // Cbus Mux control
// printf("\tCbus4 = 0x%X\n", Data.Cbus4); // Cbus Mux control
//
// printf("\tRIsD2XX = 0x%X\n", Data.RIsD2XX); // non-zero if using D2XX
// }
// ftStatus = FT_SetBaudRate(ftHandle0, 115200); // Set baud rate to 115200
// if (ftStatus == FT_OK) {
// printf("FT_SetBaudRate OK\n");
// }
// else {
// printf("FT_SetBaudRate Failed\n");
// }
// ftStatus = FT_SetFlowControl(ftHandle0, FT_FLOW_RTS_CTS, 0x11, 0x13);
// if (ftStatus == FT_OK) {
// printf("FT_SetFlowControl OK\n");
// }
// else {
// printf("FT_SetFlowControl Failed\n");
// }
// UCHAR LatencyTimer = 1;
// ftStatus = FT_SetLatencyTimer(ftHandle0, LatencyTimer );
// if (ftStatus == FT_OK) {
// printf("Set LatencyTimer: %u\n", LatencyTimer );
// }
// else {
// printf("FT_SetLatencyTimer failed\n");
// retCode = 1;
// goto exit;
// }
// DWORD TransferSize = 64;
// ftStatus = FT_SetUSBParameters(ftHandle0, TransferSize, TransferSize);
// if (ftStatus == FT_OK) {
// printf("In/Out transfer size set to 64 bytes\n");
// }
// else {
// printf("FT_SetUSBParameters failed\n");
// retCode = 1;
// goto exit;
// }
// FT_SetDataCharacteristics(ftHandle0, FT_BITS_8, FT_STOP_BITS_1, FT_PARITY_NONE);
//
//
//exit:
// free(Data.Manufacturer);
// free(Data.ManufacturerId);
// free(Data.Description);
// free(Data.SerialNumber);
// printf("Returning %d\n", retCode);
// return retCode;
//}
//
// unsigned int get_cyclecount (void)
// {
// unsigned int value;
// // Read CCNT Register
// asm volatile ("MRC p15, 0, %0, c9, c13, 0\t\n": "=r"(value));
// return value;
// }
//
// void init_perfcounters (int32_t do_reset, int32_t enable_divider)
// {
// // in general enable all counters (including cycle counter)
// int32_t value = 1;
//
    // // perform reset:
// if (do_reset)
// {
// value |= 2; // reset all counters to zero.
// value |= 4; // reset cycle counter to zero.
// }
//
// if (enable_divider)
// value |= 8; // enable "by 64" divider for CCNT.
//
// value |= 16;
//
// // program the performance-counter control-register:
// asm volatile ("MCR p15, 0, %0, c9, c12, 0\t\n" :: "r"(value));
//
// // enable all counters:
// asm volatile ("MCR p15, 0, %0, c9, c12, 1\t\n" :: "r"(0x8000000f));
//
// // clear overflows:
// asm volatile ("MCR p15, 0, %0, c9, c12, 3\t\n" :: "r"(0x8000000f));
// }
import "C"
import "fmt"
import "os"
import "syscall"
import "unsafe"
import "time"
import "strconv"
import "strings"
import "sort"
import "log"
var timeStampLayout = "02:Jan:2006:15:04:05.000000"
var samples = 100
func openPort(name string) (f *os.File, err error) {
f, err = os.OpenFile(name, syscall.O_RDWR|syscall.O_NOCTTY, 0666)
if err != nil {
return nil, err
}
defer func() {
if err != nil && f != nil {
f.Close()
}
}()
fd := f.Fd()
    // Set serial port 'name' to 115200/8/N/1 in RAW mode (i.e. no pre-processing of received data)
    // and pay special attention to the Cc field: it tells the serial port not to return until
    // at least syscall.VMIN bytes have been read. This is a tunable parameter that may help in Lab 3
t := syscall.Termios{
Iflag: syscall.IGNPAR,
Cflag: syscall.CS8 | syscall.CREAD | syscall.CLOCAL | syscall.B115200,
Cc: [32]uint8{syscall.VMIN: 27},
Ispeed: syscall.B115200,
Ospeed: syscall.B115200,
}
// Syscall to apply these parameters
_, _, errno := syscall.Syscall6(
syscall.SYS_IOCTL,
uintptr(fd),
uintptr(syscall.TCSETS),
uintptr(unsafe.Pointer(&t)),
0,
0,
0,
)
if errno != 0 {
return nil, errno
}
return f, nil
}
type int64arr []int64
func (a int64arr) Len() int { return len(a) }
func (a int64arr) Swap(i, j int){ a[i], a[j] = a[j], a[i] }
func (a int64arr) Less(i, j int) bool { return a[i] < a[j] }
func generateTimestamp() string | {
t := time.Now()
return t.Format(timeStampLayout)
} | identifier_body |
|
movegen.rs | square: Square,
bitboard: BitBoard,
promotion: bool,
}
impl SquareAndBitBoard {
pub fn new(sq: Square, bb: BitBoard, promotion: bool) -> SquareAndBitBoard {
SquareAndBitBoard {
square: sq,
bitboard: bb,
promotion: promotion,
}
}
}
pub type MoveList = NoDrop<ArrayVec<SquareAndBitBoard, 18>>;
/// An incremental move generator
///
/// This structure enumerates moves slightly slower than board.enumerate_moves(...),
/// but has some extra features, such as:
///
/// * Being an iterator
/// * Not requiring you to create a buffer
/// * Only iterating moves that match a certain pattern
/// * Being iterable multiple times (such as, iterating once for all captures, then iterating again
/// for all quiets)
/// * Doing as little work early on as possible, so that if you are not going to look at every move, the
/// struture moves faster
/// * Being able to iterate pseudo legal moves, while keeping the (nearly) free legality checks in
/// place
///
/// # Examples
///
/// ```
/// use chess::MoveGen;
/// use chess::Board;
/// use chess::EMPTY;
/// use chess::construct;
///
/// // create a board with the initial position
/// let board = Board::default();
///
/// // create an iterable
/// let mut iterable = MoveGen::new_legal(&board);
///
/// // make sure .len() works.
/// assert_eq!(iterable.len(), 20); // the .len() function does *not* consume the iterator
///
/// // lets iterate over targets.
/// let targets = board.color_combined(!board.side_to_move());
/// iterable.set_iterator_mask(*targets);
///
/// // count the number of targets
/// let mut count = 0;
/// for _ in &mut iterable {
/// count += 1;
///     // This move captures one of my opponent's pieces (with the exception of en passant)
/// }
///
/// // now, iterate over the rest of the moves
/// iterable.set_iterator_mask(!EMPTY);
/// for _ in &mut iterable {
/// count += 1;
/// // This move does not capture anything
/// }
///
/// // make sure it works
/// assert_eq!(count, 20);
///
/// ```
pub struct MoveGen {
moves: MoveList,
promotion_index: usize,
iterator_mask: BitBoard,
index: usize,
}
impl MoveGen {
#[inline(always)]
fn enumerate_moves(board: &Board) -> MoveList {
let checkers = *board.checkers();
let mask = !board.color_combined(board.side_to_move());
let mut movelist = NoDrop::new(ArrayVec::<SquareAndBitBoard, 18>::new());
if checkers == EMPTY {
PawnType::legals::<NotInCheckType>(&mut movelist, &board, mask);
KnightType::legals::<NotInCheckType>(&mut movelist, &board, mask);
BishopType::legals::<NotInCheckType>(&mut movelist, &board, mask);
RookType::legals::<NotInCheckType>(&mut movelist, &board, mask);
QueenType::legals::<NotInCheckType>(&mut movelist, &board, mask);
KingType::legals::<NotInCheckType>(&mut movelist, &board, mask);
} else if checkers.popcnt() == 1 {
PawnType::legals::<InCheckType>(&mut movelist, &board, mask);
KnightType::legals::<InCheckType>(&mut movelist, &board, mask);
BishopType::legals::<InCheckType>(&mut movelist, &board, mask);
RookType::legals::<InCheckType>(&mut movelist, &board, mask);
QueenType::legals::<InCheckType>(&mut movelist, &board, mask);
KingType::legals::<InCheckType>(&mut movelist, &board, mask);
} else {
KingType::legals::<InCheckType>(&mut movelist, &board, mask);
}
movelist
}
/// Create a new `MoveGen` structure, only generating legal moves
#[inline(always)]
pub fn new_legal(board: &Board) -> MoveGen {
MoveGen {
moves: MoveGen::enumerate_moves(board),
promotion_index: 0,
iterator_mask: !EMPTY,
index: 0,
}
}
/// Never, ever, iterate any moves that land on the following squares
pub fn remove_mask(&mut self, mask: BitBoard) {
for x in 0..self.moves.len() {
self.moves[x].bitboard &= !mask;
}
}
/// Never, ever, iterate this move
pub fn remove_move(&mut self, chess_move: ChessMove) -> bool {
for x in 0..self.moves.len() {
if self.moves[x].square == chess_move.get_source() {
self.moves[x].bitboard &= !BitBoard::from_square(chess_move.get_dest());
return true;
}
}
false
}
    /// For now, only iterate moves that land on the following squares
/// Note: Once iteration is completed, you can pass in a mask of ! `EMPTY`
/// to get the remaining moves, or another mask
pub fn set_iterator_mask(&mut self, mask: BitBoard) {
self.iterator_mask = mask;
self.index = 0;
// the iterator portion of this struct relies on the invariant that
// the bitboards at the beginning of the moves[] array are the only
// ones used. As a result, we must partition the list such that the
// assumption is true.
// first, find the first non-used moves index, and store that in i
let mut i = 0;
while i < self.moves.len() && self.moves[i].bitboard & self.iterator_mask != EMPTY {
i += 1;
}
// next, find each element past i where the moves are used, and store
// that in i. Then, increment i to point to a new unused slot.
for j in (i + 1)..self.moves.len() {
if self.moves[j].bitboard & self.iterator_mask != EMPTY {
let backup = self.moves[i];
self.moves[i] = self.moves[j];
self.moves[j] = backup;
i += 1;
}
}
}
/// This function checks the legality *only for moves generated by `MoveGen`*.
///
/// Calling this function for moves not generated by `MoveGen` will result in possibly
/// incorrect results, and making that move on the `Board` will result in undefined behavior.
/// This function may panic! if these rules are not followed.
///
/// If you are validating a move from a user, you should call the .legal() function.
pub fn legal_quick(board: &Board, chess_move: ChessMove) -> bool {
let piece = board.piece_on(chess_move.get_source()).unwrap();
match piece {
Piece::Rook => true,
Piece::Bishop => true,
Piece::Knight => true,
Piece::Queen => true,
Piece::Pawn => {
if chess_move.get_source().get_file() != chess_move.get_dest().get_file()
&& board.piece_on(chess_move.get_dest()).is_none()
{
// en-passant
PawnType::legal_ep_move(board, chess_move.get_source(), chess_move.get_dest())
} else {
true
}
}
Piece::King => {
let bb = between(chess_move.get_source(), chess_move.get_dest());
if bb.popcnt() == 1 {
// castles
if !KingType::legal_king_move(board, bb.to_square()) {
false
} else {
KingType::legal_king_move(board, chess_move.get_dest())
}
} else {
KingType::legal_king_move(board, chess_move.get_dest())
}
}
}
}
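    // Example (sketch): `legal_quick` is only sound for moves that this
    // generator produced; a move coming from user input should be validated
    // with `Board::legal` instead.
    //
    //     for m in MoveGen::new_legal(&board) {
    //         debug_assert!(MoveGen::legal_quick(&board, m));
    //     }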
/// Fastest perft test with this structure
pub fn movegen_perft_test(board: &Board, depth: usize) -> usize {
let iterable = MoveGen::new_legal(board);
let mut result: usize = 0;
if depth == 1 {
iterable.len()
} else {
for m in iterable {
let bresult = board.make_move_new(m);
result += MoveGen::movegen_perft_test(&bresult, depth - 1);
}
result
}
}
#[cfg(test)]
/// Do a perft test after splitting the moves up into two groups
pub fn movegen_perft_test_piecewise(board: &Board, depth: usize) -> usize {
let mut iterable = MoveGen::new_legal(board);
let targets = board.color_combined(!board.side_to_move());
let mut result: usize = 0;
if depth == 1 {
iterable.set_iterator_mask(*targets);
result += iterable.len();
iterable.set_iterator_mask(!targets);
result += iterable.len();
result
} else {
iterable.set_iterator_mask(*targets);
for x in &mut iterable {
let mut bresult = mem::MaybeUninit::<Board>::uninit();
unsafe {
board.make_move(x, &mut *bresult.as_mut | pub struct SquareAndBitBoard { | random_line_split |
|
movegen.rs | ).popcnt() as usize;
}
}
result
}
}
impl Iterator for MoveGen {
type Item = ChessMove;
/// Give a size_hint to some functions that need it
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
/// Find the next chess move.
fn next(&mut self) -> Option<ChessMove> {
if self.index >= self.moves.len()
|| self.moves[self.index].bitboard & self.iterator_mask == EMPTY
{
// are we done?
None
} else if self.moves[self.index].promotion {
let moves = &mut self.moves[self.index];
let dest = (moves.bitboard & self.iterator_mask).to_square();
// deal with potential promotions for this pawn
let result = ChessMove::new(
moves.square,
dest,
Some(PROMOTION_PIECES[self.promotion_index]),
);
self.promotion_index += 1;
if self.promotion_index >= NUM_PROMOTION_PIECES {
moves.bitboard ^= BitBoard::from_square(dest);
self.promotion_index = 0;
if moves.bitboard & self.iterator_mask == EMPTY {
self.index += 1;
}
}
Some(result)
} else {
// not a promotion move, so it's a 'normal' move as far as this function is concerned
let moves = &mut self.moves[self.index];
let dest = (moves.bitboard & self.iterator_mask).to_square();
moves.bitboard ^= BitBoard::from_square(dest);
if moves.bitboard & self.iterator_mask == EMPTY {
self.index += 1;
}
Some(ChessMove::new(moves.square, dest, None))
}
}
}
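// Sketch showing how the iterator expands a single pawn push into four promotion
// moves (Q/R/B/N), as handled in `next()` above. The FEN and expected count are
// illustrative; `Board::from_str` and `ChessMove::get_promotion` are assumed to be
// available as elsewhere in this crate.
#[cfg(test)]
fn example_promotion_expansion() {
    use std::str::FromStr;
    // white pawn on b7 with an unobstructed promotion square
    let board = Board::from_str("4k3/1P6/8/8/8/8/K7/8 w - - 0 1").expect("valid FEN");
    let promotions = MoveGen::new_legal(&board)
        .filter(|m| m.get_promotion().is_some())
        .count();
    assert_eq!(promotions, NUM_PROMOTION_PIECES);
}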
#[cfg(test)]
use crate::board_builder::BoardBuilder;
#[cfg(test)]
use std::collections::HashSet;
#[cfg(test)]
use std::convert::TryInto;
#[cfg(test)]
use std::str::FromStr;
#[cfg(test)]
fn movegen_perft_test(fen: String, depth: usize, result: usize) {
let board: Board = BoardBuilder::from_str(&fen).unwrap().try_into().unwrap();
assert_eq!(MoveGen::movegen_perft_test(&board, depth), result);
assert_eq!(MoveGen::movegen_perft_test_piecewise(&board, depth), result);
}
#[test]
fn movegen_perft_kiwipete() {
movegen_perft_test(
"r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq - 0 1".to_owned(),
5,
193690690,
);
}
#[test]
fn movegen_perft_1() {
movegen_perft_test("8/5bk1/8/2Pp4/8/1K6/8/8 w - d6 0 1".to_owned(), 6, 824064);
// Invalid FEN
}
#[test]
fn movegen_perft_2() {
movegen_perft_test("8/8/1k6/8/2pP4/8/5BK1/8 b - d3 0 1".to_owned(), 6, 824064);
// Invalid FEN
}
#[test]
fn movegen_perft_3() {
movegen_perft_test("8/8/1k6/2b5/2pP4/8/5K2/8 b - d3 0 1".to_owned(), 6, 1440467);
}
#[test]
fn movegen_perft_4() {
movegen_perft_test("8/5k2/8/2Pp4/2B5/1K6/8/8 w - d6 0 1".to_owned(), 6, 1440467);
}
#[test]
fn movegen_perft_5() {
movegen_perft_test("5k2/8/8/8/8/8/8/4K2R w K - 0 1".to_owned(), 6, 661072);
}
#[test]
fn movegen_perft_6() {
movegen_perft_test("4k2r/8/8/8/8/8/8/5K2 b k - 0 1".to_owned(), 6, 661072);
}
#[test]
fn movegen_perft_7() {
movegen_perft_test("3k4/8/8/8/8/8/8/R3K3 w Q - 0 1".to_owned(), 6, 803711);
}
#[test]
fn movegen_perft_8() {
movegen_perft_test("r3k3/8/8/8/8/8/8/3K4 b q - 0 1".to_owned(), 6, 803711);
}
#[test]
fn movegen_perft_9() {
movegen_perft_test(
"r3k2r/1b4bq/8/8/8/8/7B/R3K2R w KQkq - 0 1".to_owned(),
4,
1274206,
);
}
#[test]
fn movegen_perft_10() {
movegen_perft_test(
"r3k2r/7b/8/8/8/8/1B4BQ/R3K2R b KQkq - 0 1".to_owned(),
4,
1274206,
);
}
#[test]
fn movegen_perft_11() {
movegen_perft_test(
"r3k2r/8/3Q4/8/8/5q2/8/R3K2R b KQkq - 0 1".to_owned(),
4,
1720476,
);
}
#[test]
fn movegen_perft_12() {
movegen_perft_test(
"r3k2r/8/5Q2/8/8/3q4/8/R3K2R w KQkq - 0 1".to_owned(),
4,
1720476,
);
}
#[test]
fn movegen_perft_13() {
movegen_perft_test("2K2r2/4P3/8/8/8/8/8/3k4 w - - 0 1".to_owned(), 6, 3821001);
}
#[test]
fn movegen_perft_14() {
movegen_perft_test("3K4/8/8/8/8/8/4p3/2k2R2 b - - 0 1".to_owned(), 6, 3821001);
}
#[test]
fn movegen_perft_15() {
movegen_perft_test("8/8/1P2K3/8/2n5/1q6/8/5k2 b - - 0 1".to_owned(), 5, 1004658);
}
#[test]
fn movegen_perft_16() {
movegen_perft_test("5K2/8/1Q6/2N5/8/1p2k3/8/8 w - - 0 1".to_owned(), 5, 1004658);
}
#[test]
fn movegen_perft_17() {
movegen_perft_test("4k3/1P6/8/8/8/8/K7/8 w - - 0 1".to_owned(), 6, 217342);
}
#[test]
fn movegen_perft_18() {
movegen_perft_test("8/k7/8/8/8/8/1p6/4K3 b - - 0 1".to_owned(), 6, 217342);
}
#[test]
fn movegen_perft_19() {
movegen_perft_test("8/P1k5/K7/8/8/8/8/8 w - - 0 1".to_owned(), 6, 92683);
}
#[test]
fn movegen_perft_20() {
movegen_perft_test("8/8/8/8/8/k7/p1K5/8 b - - 0 1".to_owned(), 6, 92683);
}
#[test]
fn movegen_perft_21() {
movegen_perft_test("K1k5/8/P7/8/8/8/8/8 w - - 0 1".to_owned(), 6, 2217);
}
#[test]
fn | movegen_perft_22 | identifier_name |
|
movegen.rs | _legal(board: &Board) -> MoveGen {
MoveGen {
moves: MoveGen::enumerate_moves(board),
promotion_index: 0,
iterator_mask: !EMPTY,
index: 0,
}
}
/// Never, ever, iterate any moves that land on the following squares
pub fn remove_mask(&mut self, mask: BitBoard) {
for x in 0..self.moves.len() {
self.moves[x].bitboard &= !mask;
}
}
/// Never, ever, iterate this move
pub fn remove_move(&mut self, chess_move: ChessMove) -> bool {
for x in 0..self.moves.len() {
if self.moves[x].square == chess_move.get_source() {
self.moves[x].bitboard &= !BitBoard::from_square(chess_move.get_dest());
return true;
}
}
false
}
/// For now, only iterate moves that land on the following squares
/// Note: Once iteration is completed, you can pass in a mask of `!EMPTY`
/// to get the remaining moves, or another mask
pub fn set_iterator_mask(&mut self, mask: BitBoard) {
self.iterator_mask = mask;
self.index = 0;
// the iterator portion of this struct relies on the invariant that
// the bitboards at the beginning of the moves[] array are the only
// ones used. As a result, we must partition the list such that the
// assumption is true.
// first, find the first non-used moves index, and store that in i
let mut i = 0;
while i < self.moves.len() && self.moves[i].bitboard & self.iterator_mask != EMPTY {
i += 1;
}
// next, for each element past i whose moves are used under the mask,
// swap it into position i, then increment i to point at the next unused slot.
for j in (i + 1)..self.moves.len() {
if self.moves[j].bitboard & self.iterator_mask != EMPTY {
let backup = self.moves[i];
self.moves[i] = self.moves[j];
self.moves[j] = backup;
i += 1;
}
}
}
/// This function checks the legality *only for moves generated by `MoveGen`*.
///
/// Calling this function for moves not generated by `MoveGen` will result in possibly
/// incorrect results, and making that move on the `Board` will result in undefined behavior.
/// This function may panic! if these rules are not followed.
///
/// If you are validating a move from a user, you should call the .legal() function.
pub fn legal_quick(board: &Board, chess_move: ChessMove) -> bool {
let piece = board.piece_on(chess_move.get_source()).unwrap();
match piece {
Piece::Rook => true,
Piece::Bishop => true,
Piece::Knight => true,
Piece::Queen => true,
Piece::Pawn => {
if chess_move.get_source().get_file() != chess_move.get_dest().get_file()
&& board.piece_on(chess_move.get_dest()).is_none()
{
// en-passant
PawnType::legal_ep_move(board, chess_move.get_source(), chess_move.get_dest())
} else {
true
}
}
Piece::King => {
let bb = between(chess_move.get_source(), chess_move.get_dest());
if bb.popcnt() == 1 {
// castles
if !KingType::legal_king_move(board, bb.to_square()) {
false
} else {
KingType::legal_king_move(board, chess_move.get_dest())
}
} else {
KingType::legal_king_move(board, chess_move.get_dest())
}
}
}
}
/// Fastest perft test with this structure
pub fn movegen_perft_test(board: &Board, depth: usize) -> usize {
let iterable = MoveGen::new_legal(board);
let mut result: usize = 0;
if depth == 1 {
iterable.len()
} else {
for m in iterable {
let bresult = board.make_move_new(m);
result += MoveGen::movegen_perft_test(&bresult, depth - 1);
}
result
}
}
#[cfg(test)]
/// Do a perft test after splitting the moves up into two groups
pub fn movegen_perft_test_piecewise(board: &Board, depth: usize) -> usize {
let mut iterable = MoveGen::new_legal(board);
let targets = board.color_combined(!board.side_to_move());
let mut result: usize = 0;
if depth == 1 {
iterable.set_iterator_mask(*targets);
result += iterable.len();
iterable.set_iterator_mask(!targets);
result += iterable.len();
result
} else {
iterable.set_iterator_mask(*targets);
for x in &mut iterable {
let mut bresult = mem::MaybeUninit::<Board>::uninit();
unsafe {
board.make_move(x, &mut *bresult.as_mut_ptr());
result += MoveGen::movegen_perft_test(&*bresult.as_ptr(), depth - 1);
}
}
iterable.set_iterator_mask(!EMPTY);
for x in &mut iterable {
let mut bresult = mem::MaybeUninit::<Board>::uninit();
unsafe {
board.make_move(x, &mut *bresult.as_mut_ptr());
result += MoveGen::movegen_perft_test(&*bresult.as_ptr(), depth - 1);
}
}
result
}
}
}
impl ExactSizeIterator for MoveGen {
/// Give the exact length of this iterator
fn len(&self) -> usize {
let mut result = 0;
for i in 0..self.moves.len() {
if self.moves[i].bitboard & self.iterator_mask == EMPTY {
break;
}
if self.moves[i].promotion {
result += ((self.moves[i].bitboard & self.iterator_mask).popcnt() as usize)
* NUM_PROMOTION_PIECES;
} else {
result += (self.moves[i].bitboard & self.iterator_mask).popcnt() as usize;
}
}
result
}
}
impl Iterator for MoveGen {
type Item = ChessMove;
/// Give a size_hint to some functions that need it
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
/// Find the next chess move.
fn next(&mut self) -> Option<ChessMove> {
if self.index >= self.moves.len()
|| self.moves[self.index].bitboard & self.iterator_mask == EMPTY
{
// are we done?
None
} else if self.moves[self.index].promotion {
let moves = &mut self.moves[self.index];
let dest = (moves.bitboard & self.iterator_mask).to_square();
// deal with potential promotions for this pawn
let result = ChessMove::new(
moves.square,
dest,
Some(PROMOTION_PIECES[self.promotion_index]),
);
self.promotion_index += 1;
if self.promotion_index >= NUM_PROMOTION_PIECES {
moves.bitboard ^= BitBoard::from_square(dest);
self.promotion_index = 0;
if moves.bitboard & self.iterator_mask == EMPTY {
self.index += 1;
}
}
Some(result)
} else {
// not a promotion move, so it's a 'normal' move as far as this function is concerned
let moves = &mut self.moves[self.index];
let dest = (moves.bitboard & self.iterator_mask).to_square();
moves.bitboard ^= BitBoard::from_square(dest);
if moves.bitboard & self.iterator_mask == EMPTY {
self.index += 1;
}
Some(ChessMove::new(moves.square, dest, None))
}
}
}
#[cfg(test)]
use crate::board_builder::BoardBuilder;
#[cfg(test)]
use std::collections::HashSet;
#[cfg(test)]
use std::convert::TryInto;
#[cfg(test)]
use std::str::FromStr;
#[cfg(test)]
fn movegen_perft_test(fen: String, depth: usize, result: usize) {
let board: Board = BoardBuilder::from_str(&fen).unwrap().try_into().unwrap();
assert_eq!(MoveGen::movegen_perft_test(&board, depth), result);
assert_eq!(MoveGen::movegen_perft_test_piecewise(&board, depth), result);
}
#[test]
fn movegen_perft_kiwipete() {
movegen_perft_test(
"r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq - 0 1".to_owned(),
5,
193690690,
);
}
#[test]
fn movegen_perft_1() | {
movegen_perft_test("8/5bk1/8/2Pp4/8/1K6/8/8 w - d6 0 1".to_owned(), 6, 824064);
// Invalid FEN
} | identifier_body |
|
demo.js | (e, a, l) {
$("#" + e + "_slider").slider({
value: a,
min: 0,
max: 100,
step: 1,
slide: function(a, t) {
$(".btn").button("reset"), $("#" + e + "_percent").text(t.value + "%"), populateTotalPercent(t.value, e), fillInCalorieAmounts(t.value, l, e)
},
change: function(a, t) {
$("#presets > .btn").removeClass("active"), $("#" + e + "_percent").text(t.value + "%"), populateTotalPercent(t.value, e), fillInCalorieAmounts(t.value, l, e)
}
}), $("#" + e + "_percent").text($("#" + e + "_slider").slider("value") + "%")
}
function popupSliderCals() {
$("div#displayCalsAmount > span").text($("#calories").val()), $("div#displayCalsAmount").show(), fillInCalorieAmounts($("#carb_slider").slider("value"), 4, "carb"), fillInCalorieAmounts($("#protein_slider").slider("value"), 4, "protein"), fillInCalorieAmounts($("#fat_slider").slider("value"), 9, "fat")
}
function fillInCalorieAmounts(e, a, l) {
var t = $("#calories").val();
if ($.isNumeric(t)) {
t = parseFloat(t);
var s = Math.round(t * e * .01 / a);
$("#" + l + "_cals").text(s);
var i = $("#meals_per_day_input").val();
$.isNumeric(i) && $("#" + l + "_cals_per_meal").text(Math.round(s / i))
}
}
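// Worked example (illustrative): with 2000 kcal and the carb slider at 50%,
// grams = Math.round(2000 * 50 * .01 / 4) = 250 g of carbs per day;
// with 4 meals per day that is Math.round(250 / 4) = 63 g per meal.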
function moveSliders(e, a, l) {
$("#carb_slider").slider("value", e), $("#protein_slider").slider("value", a), $("#fat_slider").slider("value", l)
}
function calcDailyCals() {
var e = "standard" === $("input[name='units']:checked").val(),
a = validateDailyCalsValues(e);
if (a) alert(a);
else {
var l = 0,
t = parseFloat($("#weight").val());
e && (t *= .453592);
var s = parseFloat($("#feet_cm").val());
e && (s = 30.48 * s + 2.54 * parseFloat($("#inches").val()));
var i = parseFloat($("#age").val()),
r = $("input[name='sex']:checked").val(),
n = $("#activity_level").val();
l = "male" == r ? 88.362 + 13.397 * t + 4.799 * s - 5.677 * i : 447.593 + 9.247 * t + 3.098 * s - 4.33 * i, "no" === n ? l *= 1.2 : "light" === n ? l *= 1.375 : "moderate" === n ? l *= 1.55 : "heavy" === n ? l *= 1.725 : "extreme" === n && (l *= 1.9);
var o = Math.round(l + parseInt($("#gain_loss_amount").val()));
$("#calAmount").text(o > 1200 ? o : 1200), $("#modalMessage").hide(), $("#dc_results").show()
}
}
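// Worked example (illustrative): the constants above match the revised
// Harris-Benedict equation. For a 30-year-old male, 70 kg, 175 cm:
// BMR = 88.362 + 13.397*70 + 4.799*175 - 5.677*30 ≈ 1695.7 kcal,
// and with no exercise ("no" -> x1.2) the daily estimate is about 2035 kcal
// before the gain/loss adjustment from the dropdown is added.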
function validateDailyCalsValues(e) {
var a = "";
$.isNumeric($("#age").val()) || (a += "Age value must be a number\n"), $.isNumeric($("#weight").val()) || (a += "Weight value must be a number\n"), $.isNumeric($("#feet_cm").val()) || (a += e ? "Feet " : "Height ", a += "value must be a number\n");
var l = $("#inches").val();
return !e || $.isNumeric(l) || parseFloat(l) < 12 || (a += "Inches value must be a number less than 12\n"), a
}
function copyVal(e) {
$("#calories").val($("#" + e).text()), $("#myDailyCals").modal("hide"), setTimeout(popupSliderCals, 1e3)
}
$(function() {
function e(e) {
var a = $("#gain_loss_amount");
a.empty(), $.each(e, function(e, l) {
var t = {
value: l
};
0 === l && (t.selected = "selected"), a.append($("<option></option>").attr(t).text(e))
})
}
var a = {
"Lose 2 Pounds per Week": -1e3,
"Lose 1.5 Pounds per Week": -750,
"Lose 1 Pounds per Week": -500,
"Lose 0.5 Pounds per Week": -250,
"Stay the Same Weight": 0,
"Gain 0.5 Pound per Week": 250,
"Gain 1 Pound per Week": 500,
"Gain 1.5 Pounds per Week": 750,
"Gain 2 Pounds per Week": 1e3
},
l = {
"Lose 1 Kg per Week": -1100,
"Lose 0.75 Kg per Week": -825,
"Lose 0.5 Kg per Week": -550,
"Lose 0.25 Kg per Week": -275,
"Stay the Same Weight": 0,
"Gain 0.25 Kg per Week": 275,
"Gain 0.5 Kg per Week": 550,
"Gain 0.75 Kg per Week": 825,
"Gain 1 Kg per Week": 1100
};
setupSlider("carb", 50, 4), setupSlider("protein", 30, 4), setupSlider("fat", 20, 9), $("#gramsPerMeal").change(function() {
this.checked ? ($("#numberMeals").slideDown("slow"), $("#macro_table th:nth-child(3)").show(), $("#macro_table td:nth-child(4)").show()) : ($("#numberMeals").slideUp("slow"), $("#macro_table th:nth-child(3)").hide(), $("#macro_table td:nth-child(4)").hide())
}), $("#calculateBtn").click(function() {
var e = $("#calories").val();
$.isNumeric(e) ? popupSliderCals() : alert("Please enter a valid calorie amount")
}), $("#presets > .btn").click(function() {
$("#presets > .btn").removeClass("active"), $(this).toggleClass("active")
}), $("#sex > .btn").click(function() {
$("#sex > .btn").removeClass("active"), $(this).toggleClass("active")
}), $('input[name="units"]').change(function() {
"standard" === $(this).val() ? ($("#weigth_units").text("Pounds"), $("#height_units").text("Feet"), $(".inches").show(), e(a)) : ($("#weigth_units").text("Kg"), $("#height_units").text("Cm"), $(".inches").hide(), e(l))
}), e(a), $("#calories").focus()
});
// angular files
var app = angular.module('MYFabDiet', [
'ngRoute',
'mobile-angular-ui',
// touch/drag feature: this is from 'mobile-angular-ui.gestures.js'.
// This is intended to provide a flexible, integrated and
// easy to use alternative to other 3rd party libs like hammer.js, with the
// final purpose to integrate gestures into default ui interactions like
// opening sidebars, turning switches on/off ..
'mobile-angular-ui.gestures'
]);
app.run(function($transform) {
window.$transform = $transform;
});
//
// You can configure ngRoute as always, but to take advantage of SharedState location
// feature (i.e. close sidebar on the back button) you should set 'reloadOnSearch: false'
// in order to avoid unwanted routing.
//
app.config(function($routeProvider) {
$routeProvider.when('/', {templateUrl: 'home.html', reloadOnSearch: false});
$routeProvider.when('/scroll', {templateUrl: 'scroll.html', reloadOnSearch: false});
$routeProvider.when('/toggle', {templateUrl: 'toggle.html', reloadOnSearch: false});
$routeProvider.when('/tabs', {templateUrl: 'tabs.html', reloadOnSearch: false});
$routeProvider.when('/accordion', {templateUrl: 'accordion.html', reloadOnSearch: false});
$routeProvider.when('/overlay', {templateUrl: 'overlay.html', reloadOnSearch: false});
$routeProvider.when('/forms', {templateUrl: 'forms.html', reloadOnSearch: false});
$routeProvider.when('/login', {templateUrl: 'login.html', reloadOnSearch: false});
$routeProvider.when('/data', {templateUrl: 'data.html', reloadOnSearch: false});
});
//
// `$touch example`
//
app.directive('toucharea', ['$touch', function($touch) {
| setupSlider | identifier_name |
|
demo.js |
function validateDailyCalsValues(e) {
var a = "";
$.isNumeric($("#age").val()) || (a += "Age value must be a number\n"), $.isNumeric($("#weight").val()) || (a += "Weight value must be a number\n"), $.isNumeric($("#feet_cm").val()) || (a += e ? "Feet " : "Height ", a += "value must be a number\n");
var l = $("#inches").val();
return !e || $.isNumeric(l) || parseFloat(l) < 12 || (a += "Inches value must be a number less than 12\n"), a
}
function copyVal(e) {
$("#calories").val($("#" + e).text()), $("#myDailyCals").modal("hide"), setTimeout(popupSliderCals, 1e3)
}
$(function() {
function e(e) {
var a = $("#gain_loss_amount");
a.empty(), $.each(e, function(e, l) {
var t = {
value: l
};
0 === l && (t.selected = "selected"), a.append($("<option></option>").attr(t).text(e))
})
}
var a = {
"Lose 2 Pounds per Week": -1e3,
"Lose 1.5 Pounds per Week": -750,
"Lose 1 Pounds per Week": -500,
"Lose 0.5 Pounds per Week": -250,
"Stay the Same Weight": 0,
"Gain 0.5 Pound per Week": 250,
"Gain 1 Pound per Week": 500,
"Gain 1.5 Pounds per Week": 750,
"Gain 2 Pounds per Week": 1e3
},
l = {
"Lose 1 Kg per Week": -1100,
"Lose 0.75 Kg per Week": -825,
"Lose 0.5 Kg per Week": -550,
"Lose 0.25 Kg per Week": -275,
"Stay the Same Weight": 0,
"Gain 0.25 Kg per Week": 275,
"Gain 0.5 Kg per Week": 550,
"Gain 0.75 Kg per Week": 825,
"Gain 1 Kg per Week": 1100
};
setupSlider("carb", 50, 4), setupSlider("protein", 30, 4), setupSlider("fat", 20, 9), $("#gramsPerMeal").change(function() {
this.checked ? ($("#numberMeals").slideDown("slow"), $("#macro_table th:nth-child(3)").show(), $("#macro_table td:nth-child(4)").show()) : ($("#numberMeals").slideUp("slow"), $("#macro_table th:nth-child(3)").hide(), $("#macro_table td:nth-child(4)").hide())
}), $("#calculateBtn").click(function() {
var e = $("#calories").val();
$.isNumeric(e) ? popupSliderCals() : alert("Please enter a valid calorie amount")
}), $("#presets > .btn").click(function() {
$("#presets > .btn").removeClass("active"), $(this).toggleClass("active")
}), $("#sex > .btn").click(function() {
$("#sex > .btn").removeClass("active"), $(this).toggleClass("active")
}), $('input[name="units"]').change(function() {
"standard" === $(this).val() ? ($("#weigth_units").text("Pounds"), $("#height_units").text("Feet"), $(".inches").show(), e(a)) : ($("#weigth_units").text("Kg"), $("#height_units").text("Cm"), $(".inches").hide(), e(l))
}), e(a), $("#calories").focus()
});
// angular files
var app = angular.module('MYFabDiet', [
'ngRoute',
'mobile-angular-ui',
// touch/drag feature: this is from 'mobile-angular-ui.gestures.js'.
// This is intended to provide a flexible, integrated and
// easy to use alternative to other 3rd party libs like hammer.js, with the
// final purpose to integrate gestures into default ui interactions like
// opening sidebars, turning switches on/off ..
'mobile-angular-ui.gestures'
]);
app.run(function($transform) {
window.$transform = $transform;
});
//
// You can configure ngRoute as always, but to take advantage of SharedState location
// feature (i.e. close sidebar on the back button) you should set 'reloadOnSearch: false'
// in order to avoid unwanted routing.
//
app.config(function($routeProvider) {
$routeProvider.when('/', {templateUrl: 'home.html', reloadOnSearch: false});
$routeProvider.when('/scroll', {templateUrl: 'scroll.html', reloadOnSearch: false});
$routeProvider.when('/toggle', {templateUrl: 'toggle.html', reloadOnSearch: false});
$routeProvider.when('/tabs', {templateUrl: 'tabs.html', reloadOnSearch: false});
$routeProvider.when('/accordion', {templateUrl: 'accordion.html', reloadOnSearch: false});
$routeProvider.when('/overlay', {templateUrl: 'overlay.html', reloadOnSearch: false});
$routeProvider.when('/forms', {templateUrl: 'forms.html', reloadOnSearch: false});
$routeProvider.when('/login', {templateUrl: 'login.html', reloadOnSearch: false});
$routeProvider.when('/data', {templateUrl: 'data.html', reloadOnSearch: false});
});
//
// `$touch example`
//
app.directive('toucharea', ['$touch', function($touch) {
// Runs during compile
return {
restrict: 'C',
link: function($scope, elem) {
$scope.touch = null;
$touch.bind(elem, {
start: function(touch) {
$scope.containerRect = elem[0].getBoundingClientRect();
$scope.touch = touch;
$scope.$apply();
},
cancel: function(touch) {
$scope.touch = touch;
$scope.$apply();
},
move: function(touch) {
$scope.touch = touch;
$scope.$apply();
},
end: function(touch) {
$scope.touch = touch;
$scope.$apply();
}
});
}
};
}]);
//
// `$drag` example: drag to dismiss
//
app.directive('dragToDismiss', function($drag, $parse, $timeout) {
return {
restrict: 'A',
compile: function(elem, attrs) {
var dismissFn = $parse(attrs.dragToDismiss);
return function(scope, elem) {
var dismiss = false;
$drag.bind(elem, {
transform: $drag.TRANSLATE_RIGHT,
move: function(drag) {
if (drag.distanceX >= drag.rect.width / 4) {
dismiss = true;
elem.addClass('dismiss');
} else {
dismiss = false;
elem.removeClass('dismiss');
}
},
cancel: function() {
elem.removeClass('dismiss');
},
end: function(drag) {
if (dismiss) {
elem.addClass('dismitted');
$timeout(function() {
scope.$apply(function() {
dismissFn(scope);
});
}, 300);
} else {
drag.reset();
}
}
});
};
}
};
});
//
// Another `$drag` usage example: this is how you could create
// a touch enabled "deck of cards" carousel. See `carousel.html` for markup.
//
app.directive('carousel', function() {
return {
restrict: 'C',
scope: {},
controller | {
var e = "standard" === $("input[name='units']:checked").val(),
a = validateDailyCalsValues(e);
if (a) alert(a);
else {
var l = 0,
t = parseFloat($("#weight").val());
e && (t *= .453592);
var s = parseFloat($("#feet_cm").val());
e && (s = 30.48 * s + 2.54 * parseFloat($("#inches").val()));
var i = parseFloat($("#age").val()),
r = $("input[name='sex']:checked").val(),
n = $("#activity_level").val();
l = "male" == r ? 88.362 + 13.397 * t + 4.799 * s - 5.677 * i : 447.593 + 9.247 * t + 3.098 * s - 4.33 * i, "no" === n ? l *= 1.2 : "light" === n ? l *= 1.375 : "moderate" === n ? l *= 1.55 : "heavy" === n ? l *= 1.725 : "extreme" === n && (l *= 1.9);
var o = Math.round(l + parseInt($("#gain_loss_amount").val()));
$("#calAmount").text(o > 1200 ? o : 1200), $("#modalMessage").hide(), $("#dc_results").show()
}
} | identifier_body |
|
demo.js | 250,
"Gain 1 Pound per Week": 500,
"Gain 1.5 Pounds per Week": 750,
"Gain 2 Pounds per Week": 1e3
},
l = {
"Lose 1 Kg per Week": -1100,
"Lose 0.75 Kg per Week": -825,
"Lose 0.5 Kg per Week": -550,
"Lose 0.25 Kg per Week": -275,
"Stay the Same Weight": 0,
"Gain 0.25 Kg per Week": 275,
"Gain 0.5 Kg per Week": 550,
"Gain 0.75 Kg per Week": 825,
"Gain 1 Kg per Week": 1100
};
setupSlider("carb", 50, 4), setupSlider("protein", 30, 4), setupSlider("fat", 20, 9), $("#gramsPerMeal").change(function() {
this.checked ? ($("#numberMeals").slideDown("slow"), $("#macro_table th:nth-child(3)").show(), $("#macro_table td:nth-child(4)").show()) : ($("#numberMeals").slideUp("slow"), $("#macro_table th:nth-child(3)").hide(), $("#macro_table td:nth-child(4)").hide())
}), $("#calculateBtn").click(function() {
var e = $("#calories").val();
$.isNumeric(e) ? popupSliderCals() : alert("Please enter a valid calorie amount")
}), $("#presets > .btn").click(function() {
$("#presets > .btn").removeClass("active"), $(this).toggleClass("active")
}), $("#sex > .btn").click(function() {
$("#sex > .btn").removeClass("active"), $(this).toggleClass("active")
}), $('input[name="units"]').change(function() {
"standard" === $(this).val() ? ($("#weigth_units").text("Pounds"), $("#height_units").text("Feet"), $(".inches").show(), e(a)) : ($("#weigth_units").text("Kg"), $("#height_units").text("Cm"), $(".inches").hide(), e(l))
}), e(a), $("#calories").focus()
});
// angular files
var app = angular.module('MYFabDiet', [
'ngRoute',
'mobile-angular-ui',
// touch/drag feature: this is from 'mobile-angular-ui.gestures.js'.
// This is intended to provide a flexible, integrated and
// easy to use alternative to other 3rd party libs like hammer.js, with the
// final purpose to integrate gestures into default ui interactions like
// opening sidebars, turning switches on/off ..
'mobile-angular-ui.gestures'
]);
app.run(function($transform) {
window.$transform = $transform;
});
//
// You can configure ngRoute as always, but to take advantage of SharedState location
// feature (i.e. close sidebar on backbutton) you should setup 'reloadOnSearch: false'
// in order to avoid unwanted routing.
//
app.config(function($routeProvider) {
$routeProvider.when('/', {templateUrl: 'home.html', reloadOnSearch: false});
$routeProvider.when('/scroll', {templateUrl: 'scroll.html', reloadOnSearch: false});
$routeProvider.when('/toggle', {templateUrl: 'toggle.html', reloadOnSearch: false});
$routeProvider.when('/tabs', {templateUrl: 'tabs.html', reloadOnSearch: false});
$routeProvider.when('/accordion', {templateUrl: 'accordion.html', reloadOnSearch: false});
$routeProvider.when('/overlay', {templateUrl: 'overlay.html', reloadOnSearch: false});
$routeProvider.when('/forms', {templateUrl: 'forms.html', reloadOnSearch: false});
$routeProvider.when('/login', {templateUrl: 'login.html', reloadOnSearch: false});
$routeProvider.when('/data', {templateUrl: 'data.html', reloadOnSearch: false});
});
//
// `$touch example`
//
app.directive('toucharea', ['$touch', function($touch) {
// Runs during compile
return {
restrict: 'C',
link: function($scope, elem) {
$scope.touch = null;
$touch.bind(elem, {
start: function(touch) {
$scope.containerRect = elem[0].getBoundingClientRect();
$scope.touch = touch;
$scope.$apply();
},
cancel: function(touch) {
$scope.touch = touch;
$scope.$apply();
},
move: function(touch) {
$scope.touch = touch;
$scope.$apply();
},
end: function(touch) {
$scope.touch = touch;
$scope.$apply();
}
});
}
};
}]);
//
// `$drag` example: drag to dismiss
//
app.directive('dragToDismiss', function($drag, $parse, $timeout) {
return {
restrict: 'A',
compile: function(elem, attrs) {
var dismissFn = $parse(attrs.dragToDismiss);
return function(scope, elem) {
var dismiss = false;
$drag.bind(elem, {
transform: $drag.TRANSLATE_RIGHT,
move: function(drag) {
if (drag.distanceX >= drag.rect.width / 4) {
dismiss = true;
elem.addClass('dismiss');
} else {
dismiss = false;
elem.removeClass('dismiss');
}
},
cancel: function() {
elem.removeClass('dismiss');
},
end: function(drag) {
if (dismiss) {
elem.addClass('dismitted');
$timeout(function() {
scope.$apply(function() {
dismissFn(scope);
});
}, 300);
} else {
drag.reset();
}
}
});
};
}
};
});
//
// Another `$drag` usage example: this is how you could create
// a touch enabled "deck of cards" carousel. See `carousel.html` for markup.
//
app.directive('carousel', function() {
return {
restrict: 'C',
scope: {},
controller: function() {
this.itemCount = 0;
this.activeItem = null;
this.addItem = function() {
var newId = this.itemCount++;
this.activeItem = this.itemCount === 1 ? newId : this.activeItem;
return newId;
};
this.next = function() {
this.activeItem = this.activeItem || 0;
this.activeItem = this.activeItem === this.itemCount - 1 ? 0 : this.activeItem + 1;
};
this.prev = function() {
this.activeItem = this.activeItem || 0;
this.activeItem = this.activeItem === 0 ? this.itemCount - 1 : this.activeItem - 1;
};
}
};
});
app.directive('script', function() {
return {
restrict: 'E',
scope: false,
link: function(scope, elem, attr) {
if (attr.type === 'text/javascript-lazy') {
var code = elem.text();
var f = new Function(code);
f();
}
}
};
});
app.directive('carouselItem', function($drag) {
return {
restrict: 'C',
require: '^carousel',
scope: {},
transclude: true,
template: '<div class="item"><div ng-transclude></div></div>',
link: function(scope, elem, attrs, carousel) {
scope.carousel = carousel;
var id = carousel.addItem();
var zIndex = function() {
var res = 0;
if (id === carousel.activeItem) {
res = 2000;
} else if (carousel.activeItem < id) {
res = 2000 - (id - carousel.activeItem);
} else {
res = 2000 - (carousel.itemCount - 1 - carousel.activeItem + id);
}
return res;
};
scope.$watch(function() {
return carousel.activeItem;
}, function() {
elem[0].style.zIndex = zIndex();
});
$drag.bind(elem, {
//
// This is an example of custom transform function
//
transform: function(element, transform, touch) {
//
// use translate both as basis for the new transform:
//
var t = $drag.TRANSLATE_BOTH(element, transform, touch);
//
// Add rotation:
//
var Dx = touch.distanceX;
var t0 = touch.startTransform;
var sign = Dx < 0 ? -1 : 1;
var angle = sign * Math.min((Math.abs(Dx) / 700) * 30, 30);
t.rotateZ = angle + (Math.round(t0.rotateZ));
return t;
},
move: function(drag) {
if (Math.abs(drag.distanceX) >= drag.rect.width / 4) {
elem.addClass('dismiss');
} else {
elem.removeClass('dismiss');
}
},
cancel: function() {
elem.removeClass('dismiss');
},
end: function(drag) {
elem.removeClass('dismiss');
if (Math.abs(drag.distanceX) >= drag.rect.width / 4) {
scope.$apply(function() {
carousel.next(); | }); | random_line_split |
|
inter-compute.ts | .model.atomicHierarchy.residues;
const { occupancy: occupancyA } = unitA.model.atomicConformation;
const { occupancy: occupancyB } = unitB.model.atomicConformation;
const hasOccupancy = occupancyA.isDefined && occupancyB.isDefined;
const structConn = unitA.model === unitB.model && StructConn.Provider.get(unitA.model);
const indexPairs = !props.forceCompute && unitA.model === unitB.model && IndexPairBonds.Provider.get(unitA.model);
const { atomSourceIndex: sourceIndex } = unitA.model.atomicHierarchy;
const { invertedIndex } = indexPairs ? Model.getInvertedAtomSourceIndex(unitB.model) : { invertedIndex: void 0 };
const structConnExhaustive = unitA.model === unitB.model && StructConn.isExhaustive(unitA.model);
// the lookup queries need to happen in the "unitB space".
// that means _imageA = inverseOperB(operA(aI))
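// In other words: operA maps unit A's local coordinates into world space and
// inverseOperB pulls them back into unit B's local frame, so unit B's
// precomputed spatial lookup (built in its own frame) can be reused unchanged.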
const imageTransform = Mat4.mul(_imageTransform, unitB.conformation.operator.inverse, unitA.conformation.operator.matrix);
const isNotIdentity = !Mat4.isIdentity(imageTransform);
const { center: bCenter, radius: bRadius } = unitB.boundary.sphere;
const testDistanceSq = (bRadius + maxRadius) * (bRadius + maxRadius);
builder.startUnitPair(unitA.id, unitB.id);
const opKeyA = unitA.conformation.operator.key;
const opKeyB = unitB.conformation.operator.key;
for (let _aI = 0 as StructureElement.UnitIndex; _aI < atomCount; _aI++) {
const aI = atomsA[_aI];
v3set(_imageA, xA[aI], yA[aI], zA[aI]);
if (isNotIdentity) v3transformMat4(_imageA, _imageA, imageTransform);
if (v3squaredDistance(_imageA, bCenter) > testDistanceSq) continue;
if (!props.forceCompute && indexPairs) {
const { maxDistance } = indexPairs;
const { offset, b, edgeProps: { order, distance, flag, key, operatorA, operatorB } } = indexPairs.bonds;
const srcA = sourceIndex.value(aI);
const aeI = getElementIdx(type_symbolA.value(aI));
for (let i = offset[srcA], il = offset[srcA + 1]; i < il; ++i) {
const bI = invertedIndex![b[i]];
const _bI = SortedArray.indexOf(unitB.elements, bI) as StructureElement.UnitIndex;
if (_bI < 0) continue;
const opA = operatorA[i];
const opB = operatorB[i];
if ((opA >= 0 && opA !== opKeyA && opA !== opKeyB) ||
(opB >= 0 && opB !== opKeyB && opB !== opKeyA)) continue;
const beI = getElementIdx(type_symbolA.value(bI));
const d = distance[i];
const dist = getDistance(unitA, aI, unitB, bI);
let add = false;
if (d >= 0) {
add = equalEps(dist, d, 0.3);
} else if (maxDistance >= 0) {
add = dist < maxDistance;
} else {
const pairingThreshold = getPairingThreshold(
aeI, beI, getElementThreshold(aeI), getElementThreshold(beI)
);
add = dist < pairingThreshold;
if (isHydrogen(aeI) && isHydrogen(beI)) {
// TODO handle molecular hydrogen
add = false;
}
}
if (add) {
builder.add(_aI, _bI, { order: order[i], flag: flag[i], key: key[i] });
}
}
continue; // assume `indexPairs` supplies all bonds
}
const structConnEntries = props.forceCompute ? void 0 : structConn && structConn.byAtomIndex.get(aI);
if (structConnEntries && structConnEntries.length) {
let added = false;
for (const se of structConnEntries) {
const { partnerA, partnerB } = se;
const p = partnerA.atomIndex === aI ? partnerB : partnerA;
const _bI = SortedArray.indexOf(unitB.elements, p.atomIndex) as StructureElement.UnitIndex;
if (_bI < 0) continue;
// check if the bond is within MAX_RADIUS for this pair of units
if (getDistance(unitA, aI, unitB, p.atomIndex) > maxRadius) continue;
builder.add(_aI, _bI, { order: se.order, flag: se.flags, key: se.rowIndex });
added = true;
}
// assume, for an atom, that if any inter unit bond is given
// all are given and thus we don't need to compute any other
if (added) continue;
}
if (structConnExhaustive) continue;
const occA = occupancyA.value(aI);
const { lookup3d } = unitB;
const { indices, count, squaredDistances } = lookup3d.find(_imageA[0], _imageA[1], _imageA[2], maxRadius);
if (count === 0) continue;
const aeI = getElementIdx(type_symbolA.value(aI));
const isHa = isHydrogen(aeI);
const thresholdA = getElementThreshold(aeI);
const altA = label_alt_idA.value(aI);
const metalA = MetalsSet.has(aeI);
const atomIdA = label_atom_idA.value(aI);
const compIdA = label_comp_idA.value(residueIndexA[aI]);
for (let ni = 0; ni < count; ni++) {
const _bI = indices[ni] as StructureElement.UnitIndex;
const bI = atomsB[_bI];
const altB = label_alt_idB.value(bI);
if (altA && altB && altA !== altB) continue;
// Do not include bonds between images of the same residue with partial occupancy.
// TODO: is this condition good enough?
// - It works for cases like 3WQJ (label_asym_id: I) which have partial occupancy.
// - Does NOT work for cases like 1RB8 (DC 7) with full occupancy.
if (hasOccupancy && occupancyB.value(bI) < 1 && occA < 1) {
if (auth_seq_idA.value(aI) === auth_seq_idB.value(bI)) {
continue;
}
}
const beI = getElementIdx(type_symbolB.value(bI)!);
const isHb = isHydrogen(beI);
if (isHa && isHb) continue;
const isMetal = (metalA || MetalsSet.has(beI)) && !(isHa || isHb);
const dist = Math.sqrt(squaredDistances[ni]);
if (dist === 0) continue;
const pairingThreshold = getPairingThreshold(aeI, beI, thresholdA, getElementThreshold(beI));
if (dist <= pairingThreshold) {
const atomIdB = label_atom_idB.value(bI);
const compIdB = label_comp_idB.value(residueIndexB[bI]);
builder.add(_aI, _bI, {
order: getInterBondOrderFromTable(compIdA, compIdB, atomIdA, atomIdB),
flag: (isMetal ? BondType.Flag.MetallicCoordination : BondType.Flag.Covalent) | BondType.Flag.Computed,
key: -1
});
}
}
}
builder.finishUnitPair();
}
export interface InterBondComputationProps extends BondComputationProps {
validUnit: (unit: Unit) => boolean
validUnitPair: (structure: Structure, unitA: Unit, unitB: Unit) => boolean
ignoreWater: boolean
ignoreIon: boolean
}
const DefaultInterBondComputationProps = {
...DefaultBondComputationProps,
ignoreWater: true,
ignoreIon: true,
};
function findBonds(structure: Structure, props: InterBondComputationProps) | {
const builder = new InterUnitGraph.Builder<number, StructureElement.UnitIndex, InterUnitEdgeProps>();
const hasIndexPairBonds = structure.models.some(m => IndexPairBonds.Provider.get(m));
const hasExhaustiveStructConn = structure.models.some(m => StructConn.isExhaustive(m));
if (props.noCompute || (structure.isCoarseGrained && !hasIndexPairBonds && !hasExhaustiveStructConn)) {
return new InterUnitBonds(builder.getMap());
}
Structure.eachUnitPair(structure, (unitA: Unit, unitB: Unit) => {
findPairBonds(unitA as Unit.Atomic, unitB as Unit.Atomic, props, builder);
}, {
maxRadius: props.maxRadius,
validUnit: (unit: Unit) => props.validUnit(unit),
validUnitPair: (unitA: Unit, unitB: Unit) => props.validUnitPair(structure, unitA, unitB)
});
return new InterUnitBonds(builder.getMap());
} | identifier_body |
|
inter-compute.ts | indexB: ElementIndex) {
unitA.conformation.position(indexA, tmpDistVecA);
unitB.conformation.position(indexB, tmpDistVecB);
return v3distance(tmpDistVecA, tmpDistVecB);
}
const _imageTransform = Mat4();
const _imageA = Vec3();
function findPairBonds(unitA: Unit.Atomic, unitB: Unit.Atomic, props: BondComputationProps, builder: InterUnitGraph.Builder<number, StructureElement.UnitIndex, InterUnitEdgeProps>) {
const { maxRadius } = props;
const { elements: atomsA, residueIndex: residueIndexA } = unitA;
const { x: xA, y: yA, z: zA } = unitA.model.atomicConformation;
const { elements: atomsB, residueIndex: residueIndexB } = unitB;
const atomCount = unitA.elements.length;
const { type_symbol: type_symbolA, label_alt_id: label_alt_idA, label_atom_id: label_atom_idA, label_comp_id: label_comp_idA } = unitA.model.atomicHierarchy.atoms;
const { type_symbol: type_symbolB, label_alt_id: label_alt_idB, label_atom_id: label_atom_idB, label_comp_id: label_comp_idB } = unitB.model.atomicHierarchy.atoms;
const { auth_seq_id: auth_seq_idA } = unitA.model.atomicHierarchy.residues;
const { auth_seq_id: auth_seq_idB } = unitB.model.atomicHierarchy.residues;
const { occupancy: occupancyA } = unitA.model.atomicConformation;
const { occupancy: occupancyB } = unitB.model.atomicConformation;
const hasOccupancy = occupancyA.isDefined && occupancyB.isDefined;
const structConn = unitA.model === unitB.model && StructConn.Provider.get(unitA.model);
const indexPairs = !props.forceCompute && unitA.model === unitB.model && IndexPairBonds.Provider.get(unitA.model);
const { atomSourceIndex: sourceIndex } = unitA.model.atomicHierarchy;
const { invertedIndex } = indexPairs ? Model.getInvertedAtomSourceIndex(unitB.model) : { invertedIndex: void 0 };
const structConnExhaustive = unitA.model === unitB.model && StructConn.isExhaustive(unitA.model);
// the lookup queries need to happen in the "unitB space".
// that means _imageA = inverseOperB(operA(aI))
const imageTransform = Mat4.mul(_imageTransform, unitB.conformation.operator.inverse, unitA.conformation.operator.matrix);
const isNotIdentity = !Mat4.isIdentity(imageTransform);
const { center: bCenter, radius: bRadius } = unitB.boundary.sphere;
const testDistanceSq = (bRadius + maxRadius) * (bRadius + maxRadius);
builder.startUnitPair(unitA.id, unitB.id);
const opKeyA = unitA.conformation.operator.key;
const opKeyB = unitB.conformation.operator.key;
for (let _aI = 0 as StructureElement.UnitIndex; _aI < atomCount; _aI++) {
const aI = atomsA[_aI];
v3set(_imageA, xA[aI], yA[aI], zA[aI]);
if (isNotIdentity) v3transformMat4(_imageA, _imageA, imageTransform);
if (v3squaredDistance(_imageA, bCenter) > testDistanceSq) continue;
if (!props.forceCompute && indexPairs) {
const { maxDistance } = indexPairs;
const { offset, b, edgeProps: { order, distance, flag, key, operatorA, operatorB } } = indexPairs.bonds;
const srcA = sourceIndex.value(aI);
const aeI = getElementIdx(type_symbolA.value(aI));
for (let i = offset[srcA], il = offset[srcA + 1]; i < il; ++i) {
const bI = invertedIndex![b[i]];
const _bI = SortedArray.indexOf(unitB.elements, bI) as StructureElement.UnitIndex;
if (_bI < 0) continue;
const opA = operatorA[i];
const opB = operatorB[i];
if ((opA >= 0 && opA !== opKeyA && opA !== opKeyB) ||
(opB >= 0 && opB !== opKeyB && opB !== opKeyA)) continue;
const beI = getElementIdx(type_symbolA.value(bI));
const d = distance[i];
const dist = getDistance(unitA, aI, unitB, bI);
let add = false;
if (d >= 0) {
add = equalEps(dist, d, 0.3);
} else if (maxDistance >= 0) {
add = dist < maxDistance;
} else {
const pairingThreshold = getPairingThreshold(
aeI, beI, getElementThreshold(aeI), getElementThreshold(beI)
);
add = dist < pairingThreshold;
if (isHydrogen(aeI) && isHydrogen(beI)) {
// TODO handle molecular hydrogen
add = false;
}
}
if (add) {
builder.add(_aI, _bI, { order: order[i], flag: flag[i], key: key[i] });
}
}
continue; // assume `indexPairs` supplies all bonds
}
const structConnEntries = props.forceCompute ? void 0 : structConn && structConn.byAtomIndex.get(aI);
if (structConnEntries && structConnEntries.length) {
let added = false;
for (const se of structConnEntries) {
const { partnerA, partnerB } = se;
const p = partnerA.atomIndex === aI ? partnerB : partnerA;
const _bI = SortedArray.indexOf(unitB.elements, p.atomIndex) as StructureElement.UnitIndex;
if (_bI < 0) continue;
// check if the bond is within MAX_RADIUS for this pair of units
if (getDistance(unitA, aI, unitB, p.atomIndex) > maxRadius) continue;
builder.add(_aI, _bI, { order: se.order, flag: se.flags, key: se.rowIndex });
added = true;
}
// assume, for an atom, that if any inter unit bond is given
// all are given and thus we don't need to compute any other
if (added) continue;
}
if (structConnExhaustive) continue;
const occA = occupancyA.value(aI);
const { lookup3d } = unitB;
const { indices, count, squaredDistances } = lookup3d.find(_imageA[0], _imageA[1], _imageA[2], maxRadius);
if (count === 0) continue;
const aeI = getElementIdx(type_symbolA.value(aI));
const isHa = isHydrogen(aeI);
const thresholdA = getElementThreshold(aeI);
const altA = label_alt_idA.value(aI);
const metalA = MetalsSet.has(aeI);
const atomIdA = label_atom_idA.value(aI);
const compIdA = label_comp_idA.value(residueIndexA[aI]);
for (let ni = 0; ni < count; ni++) {
const _bI = indices[ni] as StructureElement.UnitIndex;
const bI = atomsB[_bI];
const altB = label_alt_idB.value(bI);
if (altA && altB && altA !== altB) continue;
// Do not include bonds between images of the same residue with partial occupancy.
// TODO: is this condition good enough?
// - It works for cases like 3WQJ (label_asym_id: I) which have partial occupancy.
// - Does NOT work for cases like 1RB8 (DC 7) with full occupancy.
if (hasOccupancy && occupancyB.value(bI) < 1 && occA < 1) {
if (auth_seq_idA.value(aI) === auth_seq_idB.value(bI)) {
continue; | }
}
const beI = getElementIdx(type_symbolB.value(bI)!);
const isHb = isHydrogen(beI);
if (isHa && isHb) continue;
const isMetal = (metalA || MetalsSet.has(beI)) && !(isHa || isHb);
const dist = Math.sqrt(squaredDistances[ni]);
if (dist === 0) continue;
const pairingThreshold = getPairingThreshold(aeI, beI, thresholdA, getElementThreshold(beI));
if (dist <= pairingThreshold) {
const atomIdB = label_atom_idB.value(bI);
const compIdB = label_comp_idB.value(residueIndexB[bI]);
builder.add(_aI, _bI, {
order: getInterBondOrderFromTable(compIdA, compIdB, atomIdA, atomIdB),
flag: (isMetal ? BondType.Flag.MetallicCoordination : BondType.Flag.Covalent) | BondType.Flag.Computed,
key: -1
});
}
}
}
builder.finishUnitPair();
}
export interface InterBondComputation | random_line_split |
|
inter-compute.ts | B: ElementIndex) {
unitA.conformation.position(indexA, tmpDistVecA);
unitB.conformation.position(indexB, tmpDistVecB);
return v3distance(tmpDistVecA, tmpDistVecB);
}
const _imageTransform = Mat4();
const _imageA = Vec3();
function | (unitA: Unit.Atomic, unitB: Unit.Atomic, props: BondComputationProps, builder: InterUnitGraph.Builder<number, StructureElement.UnitIndex, InterUnitEdgeProps>) {
const { maxRadius } = props;
const { elements: atomsA, residueIndex: residueIndexA } = unitA;
const { x: xA, y: yA, z: zA } = unitA.model.atomicConformation;
const { elements: atomsB, residueIndex: residueIndexB } = unitB;
const atomCount = unitA.elements.length;
const { type_symbol: type_symbolA, label_alt_id: label_alt_idA, label_atom_id: label_atom_idA, label_comp_id: label_comp_idA } = unitA.model.atomicHierarchy.atoms;
const { type_symbol: type_symbolB, label_alt_id: label_alt_idB, label_atom_id: label_atom_idB, label_comp_id: label_comp_idB } = unitB.model.atomicHierarchy.atoms;
const { auth_seq_id: auth_seq_idA } = unitA.model.atomicHierarchy.residues;
const { auth_seq_id: auth_seq_idB } = unitB.model.atomicHierarchy.residues;
const { occupancy: occupancyA } = unitA.model.atomicConformation;
const { occupancy: occupancyB } = unitB.model.atomicConformation;
const hasOccupancy = occupancyA.isDefined && occupancyB.isDefined;
const structConn = unitA.model === unitB.model && StructConn.Provider.get(unitA.model);
const indexPairs = !props.forceCompute && unitA.model === unitB.model && IndexPairBonds.Provider.get(unitA.model);
const { atomSourceIndex: sourceIndex } = unitA.model.atomicHierarchy;
const { invertedIndex } = indexPairs ? Model.getInvertedAtomSourceIndex(unitB.model) : { invertedIndex: void 0 };
const structConnExhaustive = unitA.model === unitB.model && StructConn.isExhaustive(unitA.model);
// the lookup queries need to happen in the "unitB space".
// that means _imageA = inverseOperB(operA(aI))
const imageTransform = Mat4.mul(_imageTransform, unitB.conformation.operator.inverse, unitA.conformation.operator.matrix);
const isNotIdentity = !Mat4.isIdentity(imageTransform);
const { center: bCenter, radius: bRadius } = unitB.boundary.sphere;
const testDistanceSq = (bRadius + maxRadius) * (bRadius + maxRadius);
builder.startUnitPair(unitA.id, unitB.id);
const opKeyA = unitA.conformation.operator.key;
const opKeyB = unitB.conformation.operator.key;
for (let _aI = 0 as StructureElement.UnitIndex; _aI < atomCount; _aI++) {
const aI = atomsA[_aI];
v3set(_imageA, xA[aI], yA[aI], zA[aI]);
if (isNotIdentity) v3transformMat4(_imageA, _imageA, imageTransform);
if (v3squaredDistance(_imageA, bCenter) > testDistanceSq) continue;
if (!props.forceCompute && indexPairs) {
const { maxDistance } = indexPairs;
const { offset, b, edgeProps: { order, distance, flag, key, operatorA, operatorB } } = indexPairs.bonds;
const srcA = sourceIndex.value(aI);
const aeI = getElementIdx(type_symbolA.value(aI));
for (let i = offset[srcA], il = offset[srcA + 1]; i < il; ++i) {
const bI = invertedIndex![b[i]];
const _bI = SortedArray.indexOf(unitB.elements, bI) as StructureElement.UnitIndex;
if (_bI < 0) continue;
const opA = operatorA[i];
const opB = operatorB[i];
if ((opA >= 0 && opA !== opKeyA && opA !== opKeyB) ||
(opB >= 0 && opB !== opKeyB && opB !== opKeyA)) continue;
const beI = getElementIdx(type_symbolA.value(bI));
const d = distance[i];
const dist = getDistance(unitA, aI, unitB, bI);
let add = false;
if (d >= 0) {
add = equalEps(dist, d, 0.3);
} else if (maxDistance >= 0) {
add = dist < maxDistance;
} else {
const pairingThreshold = getPairingThreshold(
aeI, beI, getElementThreshold(aeI), getElementThreshold(beI)
);
add = dist < pairingThreshold;
if (isHydrogen(aeI) && isHydrogen(beI)) {
// TODO handle molecular hydrogen
add = false;
}
}
if (add) {
builder.add(_aI, _bI, { order: order[i], flag: flag[i], key: key[i] });
}
}
continue; // assume `indexPairs` supplies all bonds
}
const structConnEntries = props.forceCompute ? void 0 : structConn && structConn.byAtomIndex.get(aI);
if (structConnEntries && structConnEntries.length) {
let added = false;
for (const se of structConnEntries) {
const { partnerA, partnerB } = se;
const p = partnerA.atomIndex === aI ? partnerB : partnerA;
const _bI = SortedArray.indexOf(unitB.elements, p.atomIndex) as StructureElement.UnitIndex;
if (_bI < 0) continue;
// check if the bond is within MAX_RADIUS for this pair of units
if (getDistance(unitA, aI, unitB, p.atomIndex) > maxRadius) continue;
builder.add(_aI, _bI, { order: se.order, flag: se.flags, key: se.rowIndex });
added = true;
}
// assume, for an atom, that if any inter unit bond is given
// all are given and thus we don't need to compute any other
if (added) continue;
}
if (structConnExhaustive) continue;
const occA = occupancyA.value(aI);
const { lookup3d } = unitB;
const { indices, count, squaredDistances } = lookup3d.find(_imageA[0], _imageA[1], _imageA[2], maxRadius);
if (count === 0) continue;
const aeI = getElementIdx(type_symbolA.value(aI));
const isHa = isHydrogen(aeI);
const thresholdA = getElementThreshold(aeI);
const altA = label_alt_idA.value(aI);
const metalA = MetalsSet.has(aeI);
const atomIdA = label_atom_idA.value(aI);
const compIdA = label_comp_idA.value(residueIndexA[aI]);
for (let ni = 0; ni < count; ni++) {
const _bI = indices[ni] as StructureElement.UnitIndex;
const bI = atomsB[_bI];
const altB = label_alt_idB.value(bI);
if (altA && altB && altA !== altB) continue;
// Do not include bonds between images of the same residue with partial occupancy.
// TODO: is this condition good enough?
// - It works for cases like 3WQJ (label_asym_id: I) which have partial occupancy.
// - Does NOT work for cases like 1RB8 (DC 7) with full occupancy.
if (hasOccupancy && occupancyB.value(bI) < 1 && occA < 1) {
if (auth_seq_idA.value(aI) === auth_seq_idB.value(bI)) {
continue;
}
}
const beI = getElementIdx(type_symbolB.value(bI)!);
const isHb = isHydrogen(beI);
if (isHa && isHb) continue;
const isMetal = (metalA || MetalsSet.has(beI)) && !(isHa || isHb);
const dist = Math.sqrt(squaredDistances[ni]);
if (dist === 0) continue;
const pairingThreshold = getPairingThreshold(aeI, beI, thresholdA, getElementThreshold(beI));
if (dist <= pairingThreshold) {
const atomIdB = label_atom_idB.value(bI);
const compIdB = label_comp_idB.value(residueIndexB[bI]);
builder.add(_aI, _bI, {
order: getInterBondOrderFromTable(compIdA, compIdB, atomIdA, atomIdB),
flag: (isMetal ? BondType.Flag.MetallicCoordination : BondType.Flag.Covalent) | BondType.Flag.Computed,
key: -1
});
}
}
}
builder.finishUnitPair();
}
export interface InterBondComputation | findPairBonds | identifier_name |
update_menu.rs | ly_buttonclicked` method
/// and executing the API command manually without losing the benefit of
/// the updatemenu automatically binding to the state of the plot through
/// the specification of `method` and `args`.
///
/// Default: true
execute: Option<bool>,
/// Sets the text label to appear on the button.
label: Option<String>,
/// Sets the Plotly method to be called on click. If the `skip` method is
/// used, the API updatemenu will function as normal but will perform no
/// API calls and will not bind automatically to state updates. This may
/// be used to create a component interface and attach to updatemenu
/// events manually via JavaScript.
method: Option<ButtonMethod>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`
#[serde(rename = "templateitemname")]
template_item_name: Option<String>,
/// Determines whether or not this button is visible.
visible: Option<bool>,
}
impl Button {
pub fn new() -> Self {
Default::default()
}
}
/// Builder struct to create buttons which can do restyles and/or relayouts
#[derive(FieldSetter)]
pub struct ButtonBuilder {
label: Option<String>,
name: Option<String>,
template_item_name: Option<String>,
visible: Option<bool>,
#[field_setter(default = "Map::new()")]
restyles: Map<String, Value>,
#[field_setter(default = "Map::new()")]
relayouts: Map<String, Value>,
}
impl ButtonBuilder {
pub fn new() -> Self {
Default::default()
}
pub fn push_restyle(mut self, restyle: impl Restyle + Serialize) -> Self {
let restyle = serde_json::to_value(&restyle).unwrap();
for (k, v) in restyle.as_object().unwrap() {
self.restyles.insert(k.clone(), v.clone());
}
self
}
pub fn push_relayout(mut self, relayout: impl Relayout + Serialize) -> Self {
let relayout = serde_json::to_value(&relayout).unwrap();
for (k, v) in relayout.as_object().unwrap() {
self.relayouts.insert(k.clone(), v.clone());
}
self
}
fn method_and_args(
restyles: Map<String, Value>,
relayouts: Map<String, Value>,
) -> (ButtonMethod, Value) {
match (restyles.is_empty(), relayouts.is_empty()) {
(true, true) => (ButtonMethod::Skip, Value::Null),
(false, true) => (ButtonMethod::Restyle, vec![restyles].into()),
(true, false) => (ButtonMethod::Relayout, vec![relayouts].into()),
(false, false) => (ButtonMethod::Update, vec![restyles, relayouts].into()),
}
}
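// Illustration of the mapping above (no additional behavior):
// - only restyles pushed  -> method = "restyle",  args = [ { ...restyles } ]
// - only relayouts pushed -> method = "relayout", args = [ { ...relayouts } ]
// - both pushed           -> method = "update",   args = [ { ...restyles }, { ...relayouts } ]
// - neither pushed        -> method = "skip" with null args, so clicking does nothing.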
pub fn build(self) -> Button {
let (method, args) = Self::method_and_args(self.restyles, self.relayouts);
Button {
label: self.label,
args: Some(args),
method: Some(method),
name: self.name,
template_item_name: self.template_item_name,
visible: self.visible,
..Default::default()
}
}
}
/// Determines whether the buttons are accessible via a dropdown menu or whether
/// the buttons are stacked horizontally or vertically
///
/// Default: "dropdown"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuType {
Dropdown,
Buttons,
}
/// Determines the direction in which the buttons are laid out, whether in a
/// dropdown menu or a row/column of buttons. For `left` and `up`, the buttons
/// will still appear in left-to-right or top-to-bottom order respectively.
///
/// Default: "down"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuDirection {
Left,
Right,
Up,
Down,
}
#[serde_with::skip_serializing_none]
#[derive(Serialize, Debug, FieldSetter, Clone)]
pub struct UpdateMenu {
/// Determines which button (by index starting from 0) is considered active.
active: Option<i32>,
/// Sets the background color of the update menu buttons.
#[serde(rename = "bgcolor")]
background_color: Option<Box<dyn Color>>,
/// Sets the color of the border enclosing the update menu.
#[serde(rename = "bordercolor")]
border_color: Option<Box<dyn Color>>,
/// Sets the width (in px) of the border enclosing the update menu.
#[serde(rename = "borderwidth")]
border_width: Option<usize>,
buttons: Option<Vec<Button>>,
/// Determines the direction in which the buttons are laid out, whether in
/// a dropdown menu or a row/column of buttons. For `left` and `up`,
/// the buttons will still appear in left-to-right or top-to-bottom order
/// respectively.
direction: Option<UpdateMenuDirection>,
/// Sets the font of the update menu button text.
font: Option<Font>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Sets the padding around the buttons or dropdown menu.
pad: Option<Pad>,
/// Highlights active dropdown item or active button if true.
#[serde(rename = "showactive")]
show_active: Option<bool>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`.
template_item_name: Option<String>,
/// Determines whether the buttons are accessible via a dropdown menu or
/// whether the buttons are stacked horizontally or vertically
#[serde(rename = "type")]
ty: Option<UpdateMenuType>,
/// Determines whether or not the update menu is visible.
visible: Option<bool>,
/// Type: number between or equal to -2 and 3
/// Default: -0.05
/// Sets the x position (in normalized coordinates) of the update menu.
x: Option<f64>,
/// Sets the update menu's horizontal position anchor. This anchor binds the
/// `x` position to the "left", "center" or "right" of the range
/// selector. Default: "right"
#[serde(rename = "xanchor")]
x_anchor: Option<Anchor>,
/// Type: number between or equal to -2 and 3
/// Default: 1
/// Sets the y position (in normalized coordinates) of the update menu.
y: Option<f64>,
/// Sets the update menu's vertical position anchor. This anchor binds the
/// `y` position to the "top", "middle" or "bottom" of the range
/// selector. Default: "top"
#[serde(rename = "yanchor")]
y_anchor: Option<Anchor>,
}
impl UpdateMenu {
pub fn new() -> Self {
Default::default()
}
}
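// Illustrative sketch (added for exposition; not part of the original source).
// Assuming the `FieldSetter` derive generates one setter per field, as the
// `Button` tests below rely on, a dropdown menu with two buttons could be
// assembled like this:
#[cfg(test)]
#[allow(dead_code)]
fn example_dropdown_menu() -> UpdateMenu {
    UpdateMenu::new()
        .ty(UpdateMenuType::Dropdown)
        .direction(UpdateMenuDirection::Down)
        .show_active(true)
        .buttons(vec![
            Button::new().label("Restyle").method(ButtonMethod::Restyle),
            Button::new().label("Relayout").method(ButtonMethod::Relayout),
        ])
}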
#[cfg(test)]
mod tests {
use serde_json::{json, to_value};
use super::*;
use crate::{
common::{Title, Visible},
Layout,
};
#[test]
fn test_serialize_button_method() {
assert_eq!(to_value(ButtonMethod::Restyle).unwrap(), json!("restyle"));
assert_eq!(to_value(ButtonMethod::Relayout).unwrap(), json!("relayout"));
assert_eq!(to_value(ButtonMethod::Animate).unwrap(), json!("animate"));
assert_eq!(to_value(ButtonMethod::Update).unwrap(), json!("update"));
assert_eq!(to_value(ButtonMethod::Skip).unwrap(), json!("skip"));
}
#[test]
fn | test_serialize_button | identifier_name |
|
update_menu.rs | use plotly_derive::FieldSetter;
use serde::Serialize;
use serde_json::{Map, Value};
use crate::{
color::Color,
common::{Anchor, Font, Pad},
Relayout, Restyle,
};
/// Sets the Plotly method to be called on click. If the `skip` method is used,
/// the API updatemenu will function as normal but will perform no API calls and
/// will not bind automatically to state updates. This may be used to create a
/// component interface and attach to updatemenu events manually via JavaScript.
#[derive(Serialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum ButtonMethod {
/// The restyle method should be used when modifying the data and data
/// attributes of the graph
Restyle,
/// The relayout method should be used when modifying the layout attributes
/// of the graph.
Relayout,
Animate,
/// The update method should be used when modifying the data and layout
/// sections of the graph.
Update,
Skip,
}
#[serde_with::skip_serializing_none]
#[derive(Serialize, Clone, Debug, FieldSetter)]
pub struct Button {
/// Sets the arguments values to be passed to the Plotly method set in
/// `method` on click.
args: Option<Value>,
/// Sets a 2nd set of `args`, these arguments values are passed to the
/// Plotly method set in `method` when clicking this button while in the
/// active state. Use this to create toggle buttons.
args2: Option<Value>,
/// When true, the API method is executed. When false, all other behaviors
/// are the same and command execution is skipped. This may be useful
/// when hooking into, for example, the `plotly_buttonclicked` method
/// and executing the API command manually without losing the benefit of
/// the updatemenu automatically binding to the state of the plot through
/// the specification of `method` and `args`.
///
/// Default: true
execute: Option<bool>,
/// Sets the text label to appear on the button.
label: Option<String>,
/// Sets the Plotly method to be called on click. If the `skip` method is
/// used, the API updatemenu will function as normal but will perform no
/// API calls and will not bind automatically to state updates. This may
/// be used to create a component interface and attach to updatemenu
/// events manually via JavaScript.
method: Option<ButtonMethod>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`
#[serde(rename = "templateitemname")]
template_item_name: Option<String>,
/// Determines whether or not this button is visible.
visible: Option<bool>,
}
impl Button {
pub fn new() -> Self {
Default::default()
}
}
/// Builder struct to create buttons which can do restyles and/or relayouts
#[derive(FieldSetter)]
pub struct ButtonBuilder {
label: Option<String>,
name: Option<String>,
template_item_name: Option<String>,
visible: Option<bool>,
#[field_setter(default = "Map::new()")]
restyles: Map<String, Value>,
#[field_setter(default = "Map::new()")]
relayouts: Map<String, Value>,
}
impl ButtonBuilder {
pub fn new() -> Self {
Default::default()
}
pub fn push_restyle(mut self, restyle: impl Restyle + Serialize) -> Self {
let restyle = serde_json::to_value(&restyle).unwrap();
for (k, v) in restyle.as_object().unwrap() {
self.restyles.insert(k.clone(), v.clone());
}
self
}
pub fn push_relayout(mut self, relayout: impl Relayout + Serialize) -> Self {
let relayout = serde_json::to_value(&relayout).unwrap();
for (k, v) in relayout.as_object().unwrap() {
self.relayouts.insert(k.clone(), v.clone());
}
self
}
fn method_and_args(
restyles: Map<String, Value>,
relayouts: Map<String, Value>,
) -> (ButtonMethod, Value) {
match (restyles.is_empty(), relayouts.is_empty()) {
(true, true) => (ButtonMethod::Skip, Value::Null),
(false, true) => (ButtonMethod::Restyle, vec![restyles].into()),
(true, false) => (ButtonMethod::Relayout, vec![relayouts].into()),
(false, false) => (ButtonMethod::Update, vec![restyles, relayouts].into()),
}
}
pub fn build(self) -> Button {
let (method, args) = Self::method_and_args(self.restyles, self.relayouts);
Button {
label: self.label,
args: Some(args),
method: Some(method),
name: self.name,
template_item_name: self.template_item_name,
visible: self.visible,
..Default::default()
}
}
}
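// Illustrative sketch (added for exposition; not part of the original source):
// how `method_and_args` picks the Plotly method from what was pushed. With
// only relayout entries the method resolves to `relayout` and the args become
// a one-element JSON array; with both kinds it becomes `update` with two
// elements, and with neither it falls back to `skip` with null args.
#[cfg(test)]
#[allow(dead_code)]
fn example_method_selection() {
    let mut relayouts = Map::new();
    relayouts.insert("title".to_string(), serde_json::json!("T"));
    let (method, args) = ButtonBuilder::method_and_args(Map::new(), relayouts);
    assert!(matches!(method, ButtonMethod::Relayout));
    assert_eq!(args, serde_json::json!([{"title": "T"}]));
}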
/// Determines whether the buttons are accessible via a dropdown menu or whether
/// the buttons are stacked horizontally or vertically
///
/// Default: "dropdown"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuType {
Dropdown,
Buttons,
}
/// Determines the direction in which the buttons are laid out, whether in a
/// dropdown menu or a row/column of buttons. For `left` and `up`, the buttons
/// will still appear in left-to-right or top-to-bottom order respectively.
///
/// Default: "down"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuDirection {
Left,
Right,
Up,
Down,
}
#[serde_with::skip_serializing_none]
#[derive(Serialize, Debug, FieldSetter, Clone)]
pub struct UpdateMenu {
/// Determines which button (by index starting from 0) is considered active.
active: Option<i32>,
/// Sets the background color of the update menu buttons.
#[serde(rename = "bgcolor")]
background_color: Option<Box<dyn Color>>,
/// Sets the color of the border enclosing the update menu.
#[serde(rename = "bordercolor")]
border_color: Option<Box<dyn Color>>,
/// Sets the width (in px) of the border enclosing the update menu.
#[serde(rename = "borderwidth")]
border_width: Option<usize>,
buttons: Option<Vec<Button>>,
/// Determines the direction in which the buttons are laid out, whether in
/// a dropdown menu or a row/column of buttons. For `left` and `up`,
/// the buttons will still appear in left-to-right or top-to-bottom order
/// respectively.
direction: Option<UpdateMenuDirection>,
/// Sets the font of the update menu button text.
font: Option<Font>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Sets the padding around the buttons or dropdown menu.
pad: Option<Pad>,
/// Highlights active dropdown item or active button if true.
#[serde(rename = "showactive")]
show_active: Option<bool>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`.
template_item_name: Option<String>,
/// Determines whether the buttons are accessible via a dropdown menu or
/// whether the buttons are stacked horizontally or vertically
#[serde(rename = "type")]
ty: Option<UpdateMenuType>,
/// Determines whether or not the update menu is visible.
visible: Option | //! Buttons and Dropdowns.
| random_line_split |
|
update_menu.rs | and will not bind automatically to state updates. This may
/// be used to create a component interface and attach to updatemenu
/// events manually via JavaScript.
method: Option<ButtonMethod>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`
#[serde(rename = "templateitemname")]
template_item_name: Option<String>,
/// Determines whether or not this button is visible.
visible: Option<bool>,
}
impl Button {
pub fn new() -> Self {
Default::default()
}
}
/// Builder struct to create buttons which can do restyles and/or relayouts
#[derive(FieldSetter)]
pub struct ButtonBuilder {
label: Option<String>,
name: Option<String>,
template_item_name: Option<String>,
visible: Option<bool>,
#[field_setter(default = "Map::new()")]
restyles: Map<String, Value>,
#[field_setter(default = "Map::new()")]
relayouts: Map<String, Value>,
}
impl ButtonBuilder {
pub fn new() -> Self {
Default::default()
}
pub fn push_restyle(mut self, restyle: impl Restyle + Serialize) -> Self {
let restyle = serde_json::to_value(&restyle).unwrap();
for (k, v) in restyle.as_object().unwrap() {
self.restyles.insert(k.clone(), v.clone());
}
self
}
pub fn push_relayout(mut self, relayout: impl Relayout + Serialize) -> Self {
let relayout = serde_json::to_value(&relayout).unwrap();
for (k, v) in relayout.as_object().unwrap() {
self.relayouts.insert(k.clone(), v.clone());
}
self
}
fn method_and_args(
restyles: Map<String, Value>,
relayouts: Map<String, Value>,
) -> (ButtonMethod, Value) {
match (restyles.is_empty(), relayouts.is_empty()) {
(true, true) => (ButtonMethod::Skip, Value::Null),
(false, true) => (ButtonMethod::Restyle, vec![restyles].into()),
(true, false) => (ButtonMethod::Relayout, vec![relayouts].into()),
(false, false) => (ButtonMethod::Update, vec![restyles, relayouts].into()),
}
}
pub fn build(self) -> Button {
let (method, args) = Self::method_and_args(self.restyles, self.relayouts);
Button {
label: self.label,
args: Some(args),
method: Some(method),
name: self.name,
template_item_name: self.template_item_name,
visible: self.visible,
..Default::default()
}
}
}
/// Determines whether the buttons are accessible via a dropdown menu or whether
/// the buttons are stacked horizontally or vertically
///
/// Default: "dropdown"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuType {
Dropdown,
Buttons,
}
/// Determines the direction in which the buttons are laid out, whether in a
/// dropdown menu or a row/column of buttons. For `left` and `up`, the buttons
/// will still appear in left-to-right or top-to-bottom order respectively.
///
/// Default: "down"
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum UpdateMenuDirection {
Left,
Right,
Up,
Down,
}
#[serde_with::skip_serializing_none]
#[derive(Serialize, Debug, FieldSetter, Clone)]
pub struct UpdateMenu {
/// Determines which button (by index starting from 0) is considered active.
active: Option<i32>,
/// Sets the background color of the update menu buttons.
#[serde(rename = "bgcolor")]
background_color: Option<Box<dyn Color>>,
/// Sets the color of the border enclosing the update menu.
#[serde(rename = "bordercolor")]
border_color: Option<Box<dyn Color>>,
/// Sets the width (in px) of the border enclosing the update menu.
#[serde(rename = "borderwidth")]
border_width: Option<usize>,
buttons: Option<Vec<Button>>,
/// Determines the direction in which the buttons are laid out, whether in
/// a dropdown menu or a row/column of buttons. For `left` and `up`,
/// the buttons will still appear in left-to-right or top-to-bottom order
/// respectively.
direction: Option<UpdateMenuDirection>,
/// Sets the font of the update menu button text.
font: Option<Font>,
/// When used in a template, named items are created in the output figure in
/// addition to any items the figure already has in this array. You can
/// modify these items in the output figure by making your own item with
/// `templateitemname` matching this `name` alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). Has no
/// effect outside of a template.
name: Option<String>,
/// Sets the padding around the buttons or dropdown menu.
pad: Option<Pad>,
/// Highlights active dropdown item or active button if true.
#[serde(rename = "showactive")]
show_active: Option<bool>,
/// Used to refer to a named item in this array in the template. Named items
/// from the template will be created even without a matching item in
/// the input figure, but you can modify one by making an item with
/// `templateitemname` matching its `name`, alongside your modifications
/// (including `visible: false` or `enabled: false` to hide it). If there is
/// no template or no matching item, this item will be hidden unless you
/// explicitly show it with `visible: true`.
template_item_name: Option<String>,
/// Determines whether the buttons are accessible via a dropdown menu or
/// whether the buttons are stacked horizontally or vertically
#[serde(rename = "type")]
ty: Option<UpdateMenuType>,
/// Determines whether or not the update menu is visible.
visible: Option<bool>,
/// Type: number between or equal to -2 and 3
/// Default: -0.05
/// Sets the x position (in normalized coordinates) of the update menu.
x: Option<f64>,
/// Sets the update menu's horizontal position anchor. This anchor binds the
/// `x` position to the "left", "center" or "right" of the range
/// selector. Default: "right"
#[serde(rename = "xanchor")]
x_anchor: Option<Anchor>,
/// Type: number between or equal to -2 and 3
/// Default: 1
/// Sets the y position (in normalized coordinates) of the update menu.
y: Option<f64>,
/// Sets the update menu's vertical position anchor. This anchor binds the
/// `y` position to the "top", "middle" or "bottom" of the range
/// selector. Default: "top"
#[serde(rename = "yanchor")]
y_anchor: Option<Anchor>,
}
impl UpdateMenu {
pub fn new() -> Self {
Default::default()
}
}
#[cfg(test)]
mod tests {
use serde_json::{json, to_value};
use super::*;
use crate::{
common::{Title, Visible},
Layout,
};
#[test]
fn test_serialize_button_method() {
assert_eq!(to_value(ButtonMethod::Restyle).unwrap(), json!("restyle"));
assert_eq!(to_value(ButtonMethod::Relayout).unwrap(), json!("relayout"));
assert_eq!(to_value(ButtonMethod::Animate).unwrap(), json!("animate"));
assert_eq!(to_value(ButtonMethod::Update).unwrap(), json!("update"));
assert_eq!(to_value(ButtonMethod::Skip).unwrap(), json!("skip"));
}
#[test]
fn test_serialize_button() | {
let button = Button::new()
.args(json!([
{ "visible": [true, false] },
{ "width": 20},
]))
.args2(json!([]))
.execute(true)
.label("Label")
.method(ButtonMethod::Update)
.name("Name")
.template_item_name("Template")
.visible(true);
let expected = json!({
"args": [
{ "visible": [true, false] },
{ "width": 20},
],
"args2": [], | identifier_body |
|
index_lookup.rs | ::{Fields, TermIterator, Terms};
use core::codec::{PostingIterator, PostingIteratorFlags};
use core::doc::Term;
use core::search::{Payload, NO_MORE_DOCS};
use core::util::DocId;
use error::{ErrorKind::IllegalState, Result};
use std::collections::hash_map::HashMap;
#[derive(Debug, Serialize, Deserialize)]
pub struct TermPosition {
pub position: i32,
pub start_offset: i32,
pub end_offset: i32,
pub payload: Payload,
}
impl Default for TermPosition {
fn default() -> Self {
TermPosition::new()
}
}
impl TermPosition {
pub fn new() -> TermPosition {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Payload::with_capacity(0),
}
}
pub fn payload_as_string(&mut self) -> String {
if self.payload.is_empty() {
unimplemented!()
} else {
unimplemented!()
}
}
pub fn payload_as_float(&mut self, default: f32) -> f32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
pub fn payload_as_int(&mut self, default: i32) -> i32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
}
/// Holds all information on a particular term in a field.
pub struct LeafIndexFieldTerm<T: PostingIterator> {
postings: Option<T>,
flags: u16,
iterator: LeafPositionIterator,
#[allow(dead_code)]
identifier: Term,
freq: i32,
}
impl<T: PostingIterator> LeafIndexFieldTerm<T> {
pub fn new<TI: TermIterator<Postings = T>, Tm: Terms<Iterator = TI>, F: Fields<Terms = Tm>>(
term: &str,
field_name: &str,
flags: u16,
doc_id: DocId,
fields: &F,
) -> Result<Self> {
let identifier = Term::new(field_name.to_string(), term.as_bytes().to_vec());
if let Some(terms) = fields.terms(identifier.field())? {
let mut terms_iterator = terms.iterator()?;
let (postings, freq) = if terms_iterator.seek_exact(identifier.bytes.as_slice())? {
let mut posting = terms_iterator.postings_with_flags(flags)?;
let mut current_doc_pos = posting.doc_id();
if current_doc_pos < doc_id {
current_doc_pos = posting.advance(doc_id)?;
}
let freq = if current_doc_pos == doc_id {
posting.freq()?
} else {
0
};
(Some(posting), freq)
} else {
(None, 0)
};
let mut iterator = LeafPositionIterator::new();
iterator.resetted = false;
iterator.current_pos = 0;
iterator.freq = freq;
Ok(LeafIndexFieldTerm {
postings,
flags,
iterator,
identifier,
freq,
})
} else {
bail!(IllegalState(format!(
"Terms {} for doc {} - field '{}' must not be none!",
term, doc_id, field_name
)));
}
}
pub fn tf(&self) -> i32 {
self.freq
}
fn current_doc(&self) -> DocId {
if let Some(ref postings) = self.postings {
postings.doc_id()
} else |
}
pub fn set_document(&mut self, doc_id: i32) -> Result<()> {
let mut current_doc_pos = self.current_doc();
if current_doc_pos < doc_id {
current_doc_pos = self.postings.as_mut().unwrap().advance(doc_id)?;
}
if current_doc_pos == doc_id && doc_id < NO_MORE_DOCS {
self.freq = self.postings.as_ref().unwrap().freq()?;
} else {
self.freq = 0;
}
self.next_doc();
Ok(())
}
pub fn validate_flags(&self, flags2: u16) -> Result<()> {
if (self.flags & flags2) < flags2 {
panic!(
"You must call get with all required flags! Instead of {} call {} once",
flags2,
flags2 | self.flags
)
} else {
Ok(())
}
}
pub fn has_next(&self) -> bool {
self.iterator.current_pos < self.iterator.freq
}
pub fn next_pos(&mut self) -> Result<TermPosition> {
let term_pos = if let Some(ref mut postings) = self.postings {
TermPosition {
position: postings.next_position()?,
start_offset: postings.start_offset()?,
end_offset: postings.end_offset()?,
payload: postings.payload()?,
}
} else {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Vec::with_capacity(0),
}
};
self.iterator.current_pos += 1;
Ok(term_pos)
}
pub fn next_doc(&mut self) {
self.iterator.resetted = false;
self.iterator.current_pos = 0;
self.iterator.freq = self.tf();
}
pub fn reset(&mut self) -> Result<()> {
if self.iterator.resetted {
panic!(
"Cannot iterate twice! If you want to iterate more that once, add _CACHE \
explicitly."
)
}
self.iterator.resetted = true;
Ok(())
}
}
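// Illustrative sketch (added for exposition; not part of the original source):
// the intended consumption pattern for one term on one document. Position the
// term on the document, then drain its positions via `has_next`/`next_pos`.
#[allow(dead_code)]
fn example_collect_positions<T: PostingIterator>(
    term: &mut LeafIndexFieldTerm<T>,
    doc_id: DocId,
) -> Result<Vec<TermPosition>> {
    term.set_document(doc_id)?;
    let mut positions = Vec::with_capacity(term.tf().max(0) as usize);
    while term.has_next() {
        positions.push(term.next_pos()?);
    }
    Ok(positions)
}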
pub struct LeafPositionIterator {
resetted: bool,
freq: i32,
current_pos: i32,
}
impl Default for LeafPositionIterator {
fn default() -> Self {
LeafPositionIterator::new()
}
}
impl LeafPositionIterator {
pub fn new() -> LeafPositionIterator {
LeafPositionIterator {
resetted: false,
freq: -1,
current_pos: 0,
}
}
}
pub struct LeafIndexField<T: Fields> {
terms: HashMap<
String,
LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>,
>,
field_name: String,
doc_id: DocId,
fields: T,
}
///
// Script interface to all information regarding a field.
//
impl<T: Fields + Clone> LeafIndexField<T> {
pub fn new(field_name: &str, doc_id: DocId, fields: T) -> Self {
LeafIndexField {
terms: HashMap::new(),
field_name: String::from(field_name),
doc_id,
fields,
}
}
pub fn get(
&mut self,
key: &str,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
self.get_with_flags(key, PostingIteratorFlags::FREQS)
}
// TODO: might be good to get the field lengths here somewhere?
// Returns a TermInfo object that can be used to access information on
// specific terms. flags can be set as described in TermInfo.
// TODO: here might be potential for running time improvement? If we knew in
// advance which terms are requested, we could provide an array which the
// user could then iterate over.
//
pub fn get_with_flags(
&mut self,
key: &str,
flags: u16,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
if !self.terms.contains_key(key) {
let index_field_term =
LeafIndexFieldTerm::new(key, &self.field_name, flags, self.doc_id, &self.fields)?;
index_field_term.validate_flags(flags)?;
self.terms.insert(String::from(key), index_field_term);
}
let index_field_term_ref = self.terms.get_mut(key).unwrap();
index_field_term_ref.validate_flags(flags)?;
Ok(index_field_term_ref)
}
pub fn set_doc_id_in_terms(&mut self, doc_id: DocId) -> Result<()> {
for ti in self.terms.values_mut() {
ti.set_document(doc_id)?;
}
Ok(())
}
}
pub struct LeafIndexLookup<T: Fields> {
pub fields: T,
pub doc_id: DocId,
index_fields: HashMap<String, LeafIndexField<T>>,
#[allow(dead_code)]
num_docs: i32,
#[allow(dead_code)]
max_doc: i32,
#[allow(dead_code)]
num_deleted_docs: i32,
}
impl<T: Fields + Clone> LeafIndexLookup<T> {
pub fn new(fields: T) -> LeafIndexLookup<T> {
LeafIndexLookup {
fields,
doc_id: -1,
index_fields: HashMap::new(),
num_docs: -1,
max_doc: -1,
num_deleted_docs: -1,
}
}
pub fn set_document(&mut self, doc_id: DocId) -> Result<()> {
if self.doc_id == doc_id {
return Ok(());
}
// We assume that docs are processed in ascending order of id. If this
// is not the case, we would have to re-initialize all posting lists in
// IndexFieldTerm. TODO | {
NO_MORE_DOCS
} | conditional_block |
index_lookup.rs | ::{Fields, TermIterator, Terms};
use core::codec::{PostingIterator, PostingIteratorFlags};
use core::doc::Term;
use core::search::{Payload, NO_MORE_DOCS};
use core::util::DocId;
use error::{ErrorKind::IllegalState, Result};
use std::collections::hash_map::HashMap;
#[derive(Debug, Serialize, Deserialize)]
pub struct TermPosition {
pub position: i32,
pub start_offset: i32,
pub end_offset: i32,
pub payload: Payload,
}
impl Default for TermPosition {
fn default() -> Self {
TermPosition::new()
}
}
impl TermPosition {
pub fn new() -> TermPosition {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Payload::with_capacity(0),
}
}
pub fn payload_as_string(&mut self) -> String {
if self.payload.is_empty() {
unimplemented!()
} else {
unimplemented!()
}
}
pub fn payload_as_float(&mut self, default: f32) -> f32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
pub fn payload_as_int(&mut self, default: i32) -> i32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
}
/// Holds all information on a particular term in a field.
pub struct LeafIndexFieldTerm<T: PostingIterator> {
postings: Option<T>,
flags: u16,
iterator: LeafPositionIterator,
#[allow(dead_code)]
identifier: Term,
freq: i32,
}
impl<T: PostingIterator> LeafIndexFieldTerm<T> {
pub fn new<TI: TermIterator<Postings = T>, Tm: Terms<Iterator = TI>, F: Fields<Terms = Tm>>(
term: &str,
field_name: &str,
flags: u16,
doc_id: DocId,
fields: &F,
) -> Result<Self> {
let identifier = Term::new(field_name.to_string(), term.as_bytes().to_vec());
if let Some(terms) = fields.terms(identifier.field())? {
let mut terms_iterator = terms.iterator()?;
let (postings, freq) = if terms_iterator.seek_exact(identifier.bytes.as_slice())? {
let mut posting = terms_iterator.postings_with_flags(flags)?;
let mut current_doc_pos = posting.doc_id();
if current_doc_pos < doc_id {
current_doc_pos = posting.advance(doc_id)?;
}
let freq = if current_doc_pos == doc_id {
posting.freq()?
} else {
0
};
(Some(posting), freq)
} else {
(None, 0)
};
let mut iterator = LeafPositionIterator::new();
iterator.resetted = false;
iterator.current_pos = 0;
iterator.freq = freq;
Ok(LeafIndexFieldTerm {
postings,
flags,
iterator,
identifier,
freq,
})
} else {
bail!(IllegalState(format!(
"Terms {} for doc {} - field '{}' must not be none!",
term, doc_id, field_name
)));
}
}
pub fn tf(&self) -> i32 |
fn current_doc(&self) -> DocId {
if let Some(ref postings) = self.postings {
postings.doc_id()
} else {
NO_MORE_DOCS
}
}
pub fn set_document(&mut self, doc_id: i32) -> Result<()> {
let mut current_doc_pos = self.current_doc();
if current_doc_pos < doc_id {
current_doc_pos = self.postings.as_mut().unwrap().advance(doc_id)?;
}
if current_doc_pos == doc_id && doc_id < NO_MORE_DOCS {
self.freq = self.postings.as_ref().unwrap().freq()?;
} else {
self.freq = 0;
}
self.next_doc();
Ok(())
}
pub fn validate_flags(&self, flags2: u16) -> Result<()> {
if (self.flags & flags2) < flags2 {
panic!(
"You must call get with all required flags! Instead of {} call {} once",
flags2,
flags2 | self.flags
)
} else {
Ok(())
}
}
pub fn has_next(&self) -> bool {
self.iterator.current_pos < self.iterator.freq
}
pub fn next_pos(&mut self) -> Result<TermPosition> {
let term_pos = if let Some(ref mut postings) = self.postings {
TermPosition {
position: postings.next_position()?,
start_offset: postings.start_offset()?,
end_offset: postings.end_offset()?,
payload: postings.payload()?,
}
} else {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Vec::with_capacity(0),
}
};
self.iterator.current_pos += 1;
Ok(term_pos)
}
pub fn next_doc(&mut self) {
self.iterator.resetted = false;
self.iterator.current_pos = 0;
self.iterator.freq = self.tf();
}
pub fn reset(&mut self) -> Result<()> {
if self.iterator.resetted {
panic!(
"Cannot iterate twice! If you want to iterate more that once, add _CACHE \
explicitly."
)
}
self.iterator.resetted = true;
Ok(())
}
}
pub struct LeafPositionIterator {
resetted: bool,
freq: i32,
current_pos: i32,
}
impl Default for LeafPositionIterator {
fn default() -> Self {
LeafPositionIterator::new()
}
}
impl LeafPositionIterator {
pub fn new() -> LeafPositionIterator {
LeafPositionIterator {
resetted: false,
freq: -1,
current_pos: 0,
}
}
}
pub struct LeafIndexField<T: Fields> {
terms: HashMap<
String,
LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>,
>,
field_name: String,
doc_id: DocId,
fields: T,
}
///
// Script interface to all information regarding a field.
//
impl<T: Fields + Clone> LeafIndexField<T> {
pub fn new(field_name: &str, doc_id: DocId, fields: T) -> Self {
LeafIndexField {
terms: HashMap::new(),
field_name: String::from(field_name),
doc_id,
fields,
}
}
pub fn get(
&mut self,
key: &str,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
self.get_with_flags(key, PostingIteratorFlags::FREQS)
}
// TODO: might be good to get the field lengths here somewhere?
// Returns a TermInfo object that can be used to access information on
// specific terms. flags can be set as described in TermInfo.
// TODO: here might be potential for running time improvement? If we knew in
// advance which terms are requested, we could provide an array which the
// user could then iterate over.
//
pub fn get_with_flags(
&mut self,
key: &str,
flags: u16,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
if !self.terms.contains_key(key) {
let index_field_term =
LeafIndexFieldTerm::new(key, &self.field_name, flags, self.doc_id, &self.fields)?;
index_field_term.validate_flags(flags)?;
self.terms.insert(String::from(key), index_field_term);
}
let index_field_term_ref = self.terms.get_mut(key).unwrap();
index_field_term_ref.validate_flags(flags)?;
Ok(index_field_term_ref)
}
pub fn set_doc_id_in_terms(&mut self, doc_id: DocId) -> Result<()> {
for ti in self.terms.values_mut() {
ti.set_document(doc_id)?;
}
Ok(())
}
}
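// Illustrative sketch (added for exposition; not part of the original source):
// scripted access to a term's within-document frequency through the field
// wrapper, assuming the field has already been positioned on the desired
// document. The term string "quick" is a placeholder.
#[allow(dead_code)]
fn example_term_freq<T: Fields + Clone>(field: &mut LeafIndexField<T>) -> Result<i32> {
    // FREQS is the minimal flag set; request positions/offsets/payloads up
    // front if next_pos() data will be needed later.
    let term = field.get_with_flags("quick", PostingIteratorFlags::FREQS)?;
    Ok(term.tf())
}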
pub struct LeafIndexLookup<T: Fields> {
pub fields: T,
pub doc_id: DocId,
index_fields: HashMap<String, LeafIndexField<T>>,
#[allow(dead_code)]
num_docs: i32,
#[allow(dead_code)]
max_doc: i32,
#[allow(dead_code)]
num_deleted_docs: i32,
}
impl<T: Fields + Clone> LeafIndexLookup<T> {
pub fn new(fields: T) -> LeafIndexLookup<T> {
LeafIndexLookup {
fields,
doc_id: -1,
index_fields: HashMap::new(),
num_docs: -1,
max_doc: -1,
num_deleted_docs: -1,
}
}
pub fn set_document(&mut self, doc_id: DocId) -> Result<()> {
if self.doc_id == doc_id {
return Ok(());
}
// We assume that docs are processed in ascending order of id. If this
// is not the case, we would have to re-initialize all posting lists in
// IndexFieldTerm. TODO | {
self.freq
} | identifier_body |
index_lookup.rs | codec::{Fields, TermIterator, Terms};
use core::codec::{PostingIterator, PostingIteratorFlags};
use core::doc::Term;
use core::search::{Payload, NO_MORE_DOCS};
use core::util::DocId;
use error::{ErrorKind::IllegalState, Result};
use std::collections::hash_map::HashMap;
#[derive(Debug, Serialize, Deserialize)]
pub struct TermPosition {
pub position: i32,
pub start_offset: i32,
pub end_offset: i32,
pub payload: Payload,
}
impl Default for TermPosition {
fn default() -> Self {
TermPosition::new()
}
}
impl TermPosition {
pub fn new() -> TermPosition {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Payload::with_capacity(0),
}
}
pub fn payload_as_string(&mut self) -> String {
if self.payload.is_empty() {
unimplemented!()
} else {
unimplemented!()
}
}
pub fn payload_as_float(&mut self, default: f32) -> f32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
pub fn payload_as_int(&mut self, default: i32) -> i32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
}
/// Holds all information on a particular term in a field.
pub struct | <T: PostingIterator> {
postings: Option<T>,
flags: u16,
iterator: LeafPositionIterator,
#[allow(dead_code)]
identifier: Term,
freq: i32,
}
impl<T: PostingIterator> LeafIndexFieldTerm<T> {
pub fn new<TI: TermIterator<Postings = T>, Tm: Terms<Iterator = TI>, F: Fields<Terms = Tm>>(
term: &str,
field_name: &str,
flags: u16,
doc_id: DocId,
fields: &F,
) -> Result<Self> {
let identifier = Term::new(field_name.to_string(), term.as_bytes().to_vec());
if let Some(terms) = fields.terms(identifier.field())? {
let mut terms_iterator = terms.iterator()?;
let (postings, freq) = if terms_iterator.seek_exact(identifier.bytes.as_slice())? {
let mut posting = terms_iterator.postings_with_flags(flags)?;
let mut current_doc_pos = posting.doc_id();
if current_doc_pos < doc_id {
current_doc_pos = posting.advance(doc_id)?;
}
let freq = if current_doc_pos == doc_id {
posting.freq()?
} else {
0
};
(Some(posting), freq)
} else {
(None, 0)
};
let mut iterator = LeafPositionIterator::new();
iterator.resetted = false;
iterator.current_pos = 0;
iterator.freq = freq;
Ok(LeafIndexFieldTerm {
postings,
flags,
iterator,
identifier,
freq,
})
} else {
bail!(IllegalState(format!(
"Terms {} for doc {} - field '{}' must not be none!",
term, doc_id, field_name
)));
}
}
pub fn tf(&self) -> i32 {
self.freq
}
fn current_doc(&self) -> DocId {
if let Some(ref postings) = self.postings {
postings.doc_id()
} else {
NO_MORE_DOCS
}
}
pub fn set_document(&mut self, doc_id: i32) -> Result<()> {
let mut current_doc_pos = self.current_doc();
if current_doc_pos < doc_id {
current_doc_pos = self.postings.as_mut().unwrap().advance(doc_id)?;
}
if current_doc_pos == doc_id && doc_id < NO_MORE_DOCS {
self.freq = self.postings.as_ref().unwrap().freq()?;
} else {
self.freq = 0;
}
self.next_doc();
Ok(())
}
pub fn validate_flags(&self, flags2: u16) -> Result<()> {
if (self.flags & flags2) < flags2 {
panic!(
"You must call get with all required flags! Instead of {} call {} once",
flags2,
flags2 | self.flags
)
} else {
Ok(())
}
}
pub fn has_next(&self) -> bool {
self.iterator.current_pos < self.iterator.freq
}
pub fn next_pos(&mut self) -> Result<TermPosition> {
let term_pos = if let Some(ref mut postings) = self.postings {
TermPosition {
position: postings.next_position()?,
start_offset: postings.start_offset()?,
end_offset: postings.end_offset()?,
payload: postings.payload()?,
}
} else {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Vec::with_capacity(0),
}
};
self.iterator.current_pos += 1;
Ok(term_pos)
}
pub fn next_doc(&mut self) {
self.iterator.resetted = false;
self.iterator.current_pos = 0;
self.iterator.freq = self.tf();
}
pub fn reset(&mut self) -> Result<()> {
if self.iterator.resetted {
panic!(
"Cannot iterate twice! If you want to iterate more that once, add _CACHE \
explicitly."
)
}
self.iterator.resetted = true;
Ok(())
}
}
pub struct LeafPositionIterator {
resetted: bool,
freq: i32,
current_pos: i32,
}
impl Default for LeafPositionIterator {
fn default() -> Self {
LeafPositionIterator::new()
}
}
impl LeafPositionIterator {
pub fn new() -> LeafPositionIterator {
LeafPositionIterator {
resetted: false,
freq: -1,
current_pos: 0,
}
}
}
pub struct LeafIndexField<T: Fields> {
terms: HashMap<
String,
LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>,
>,
field_name: String,
doc_id: DocId,
fields: T,
}
///
// Script interface to all information regarding a field.
//
impl<T: Fields + Clone> LeafIndexField<T> {
pub fn new(field_name: &str, doc_id: DocId, fields: T) -> Self {
LeafIndexField {
terms: HashMap::new(),
field_name: String::from(field_name),
doc_id,
fields,
}
}
pub fn get(
&mut self,
key: &str,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
self.get_with_flags(key, PostingIteratorFlags::FREQS)
}
// TODO: might be good to get the field lengths here somewhere?
// Returns a TermInfo object that can be used to access information on
// specific terms. flags can be set as described in TermInfo.
// TODO: here might be potential for running time improvement? If we knew in
// advance which terms are requested, we could provide an array which the
// user could then iterate over.
//
pub fn get_with_flags(
&mut self,
key: &str,
flags: u16,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
if !self.terms.contains_key(key) {
let index_field_term =
LeafIndexFieldTerm::new(key, &self.field_name, flags, self.doc_id, &self.fields)?;
index_field_term.validate_flags(flags)?;
self.terms.insert(String::from(key), index_field_term);
}
let index_field_term_ref = self.terms.get_mut(key).unwrap();
index_field_term_ref.validate_flags(flags)?;
Ok(index_field_term_ref)
}
pub fn set_doc_id_in_terms(&mut self, doc_id: DocId) -> Result<()> {
for ti in self.terms.values_mut() {
ti.set_document(doc_id)?;
}
Ok(())
}
}
pub struct LeafIndexLookup<T: Fields> {
pub fields: T,
pub doc_id: DocId,
index_fields: HashMap<String, LeafIndexField<T>>,
#[allow(dead_code)]
num_docs: i32,
#[allow(dead_code)]
max_doc: i32,
#[allow(dead_code)]
num_deleted_docs: i32,
}
impl<T: Fields + Clone> LeafIndexLookup<T> {
pub fn new(fields: T) -> LeafIndexLookup<T> {
LeafIndexLookup {
fields,
doc_id: -1,
index_fields: HashMap::new(),
num_docs: -1,
max_doc: -1,
num_deleted_docs: -1,
}
}
pub fn set_document(&mut self, doc_id: DocId) -> Result<()> {
if self.doc_id == doc_id {
return Ok(());
}
// We assume that docs are processed in ascending order of id. If this
// is not the case, we would have to re-initialize all posting lists in
// IndexFieldTerm. TODO | LeafIndexFieldTerm | identifier_name |
index_lookup.rs | codec::{Fields, TermIterator, Terms};
use core::codec::{PostingIterator, PostingIteratorFlags};
use core::doc::Term;
use core::search::{Payload, NO_MORE_DOCS};
use core::util::DocId;
use error::{ErrorKind::IllegalState, Result};
use std::collections::hash_map::HashMap;
#[derive(Debug, Serialize, Deserialize)]
pub struct TermPosition {
pub position: i32,
pub start_offset: i32,
pub end_offset: i32,
pub payload: Payload,
}
impl Default for TermPosition {
fn default() -> Self {
TermPosition::new()
}
}
impl TermPosition {
pub fn new() -> TermPosition {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Payload::with_capacity(0),
}
}
pub fn payload_as_string(&mut self) -> String {
if self.payload.is_empty() {
unimplemented!()
} else {
unimplemented!()
}
}
pub fn payload_as_float(&mut self, default: f32) -> f32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
pub fn payload_as_int(&mut self, default: i32) -> i32 {
if self.payload.is_empty() {
default
} else {
unimplemented!()
}
}
}
/// Holds all information on a particular term in a field.
pub struct LeafIndexFieldTerm<T: PostingIterator> {
postings: Option<T>,
flags: u16,
iterator: LeafPositionIterator,
#[allow(dead_code)]
identifier: Term,
freq: i32,
}
impl<T: PostingIterator> LeafIndexFieldTerm<T> {
pub fn new<TI: TermIterator<Postings = T>, Tm: Terms<Iterator = TI>, F: Fields<Terms = Tm>>(
term: &str,
field_name: &str,
flags: u16,
doc_id: DocId,
fields: &F,
) -> Result<Self> {
let identifier = Term::new(field_name.to_string(), term.as_bytes().to_vec());
if let Some(terms) = fields.terms(identifier.field())? {
let mut terms_iterator = terms.iterator()?;
let (postings, freq) = if terms_iterator.seek_exact(identifier.bytes.as_slice())? {
let mut posting = terms_iterator.postings_with_flags(flags)?;
let mut current_doc_pos = posting.doc_id();
if current_doc_pos < doc_id {
current_doc_pos = posting.advance(doc_id)?;
}
let freq = if current_doc_pos == doc_id {
posting.freq()?
} else {
0
};
(Some(posting), freq)
} else {
(None, 0)
};
let mut iterator = LeafPositionIterator::new();
iterator.resetted = false;
iterator.current_pos = 0;
iterator.freq = freq;
Ok(LeafIndexFieldTerm {
postings,
flags,
iterator,
identifier,
freq,
})
} else {
bail!(IllegalState(format!(
"Terms {} for doc {} - field '{}' must not be none!",
term, doc_id, field_name
)));
}
}
pub fn tf(&self) -> i32 {
self.freq
}
fn current_doc(&self) -> DocId {
if let Some(ref postings) = self.postings {
postings.doc_id()
} else {
NO_MORE_DOCS
}
}
pub fn set_document(&mut self, doc_id: i32) -> Result<()> {
let mut current_doc_pos = self.current_doc();
if current_doc_pos < doc_id {
current_doc_pos = self.postings.as_mut().unwrap().advance(doc_id)?;
}
if current_doc_pos == doc_id && doc_id < NO_MORE_DOCS {
self.freq = self.postings.as_ref().unwrap().freq()?;
} else {
self.freq = 0;
}
self.next_doc();
Ok(())
}
pub fn validate_flags(&self, flags2: u16) -> Result<()> {
if (self.flags & flags2) < flags2 {
panic!(
"You must call get with all required flags! Instead of {} call {} once",
flags2,
flags2 | self.flags
)
} else {
Ok(())
}
}
pub fn has_next(&self) -> bool {
self.iterator.current_pos < self.iterator.freq
}
pub fn next_pos(&mut self) -> Result<TermPosition> {
let term_pos = if let Some(ref mut postings) = self.postings {
TermPosition {
position: postings.next_position()?,
start_offset: postings.start_offset()?,
end_offset: postings.end_offset()?,
payload: postings.payload()?,
}
} else {
TermPosition {
position: -1,
start_offset: -1,
end_offset: -1,
payload: Vec::with_capacity(0),
}
};
self.iterator.current_pos += 1;
Ok(term_pos)
}
pub fn next_doc(&mut self) {
self.iterator.resetted = false;
self.iterator.current_pos = 0;
self.iterator.freq = self.tf();
}
pub fn reset(&mut self) -> Result<()> {
if self.iterator.resetted {
panic!(
"Cannot iterate twice! If you want to iterate more that once, add _CACHE \
explicitly."
)
}
self.iterator.resetted = true;
Ok(())
}
}
pub struct LeafPositionIterator {
resetted: bool,
freq: i32,
current_pos: i32,
}
impl Default for LeafPositionIterator {
fn default() -> Self {
LeafPositionIterator::new()
}
}
impl LeafPositionIterator {
pub fn new() -> LeafPositionIterator {
LeafPositionIterator {
resetted: false,
freq: -1,
current_pos: 0,
}
}
}
pub struct LeafIndexField<T: Fields> { | LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>,
>,
field_name: String,
doc_id: DocId,
fields: T,
}
///
// Script interface to all information regarding a field.
//
impl<T: Fields + Clone> LeafIndexField<T> {
pub fn new(field_name: &str, doc_id: DocId, fields: T) -> Self {
LeafIndexField {
terms: HashMap::new(),
field_name: String::from(field_name),
doc_id,
fields,
}
}
pub fn get(
&mut self,
key: &str,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
self.get_with_flags(key, PostingIteratorFlags::FREQS)
}
// TODO: might be good to get the field lengths here somewhere?
// Returns a TermInfo object that can be used to access information on
// specific terms. flags can be set as described in TermInfo.
// TODO: here might be potential for running time improvement? If we knew in
// advance which terms are requested, we could provide an array which the
// user could then iterate over.
//
pub fn get_with_flags(
&mut self,
key: &str,
flags: u16,
) -> Result<&mut LeafIndexFieldTerm<<<T::Terms as Terms>::Iterator as TermIterator>::Postings>>
{
if !self.terms.contains_key(key) {
let index_field_term =
LeafIndexFieldTerm::new(key, &self.field_name, flags, self.doc_id, &self.fields)?;
index_field_term.validate_flags(flags)?;
self.terms.insert(String::from(key), index_field_term);
}
let index_field_term_ref = self.terms.get_mut(key).unwrap();
index_field_term_ref.validate_flags(flags)?;
Ok(index_field_term_ref)
}
pub fn set_doc_id_in_terms(&mut self, doc_id: DocId) -> Result<()> {
for ti in self.terms.values_mut() {
ti.set_document(doc_id)?;
}
Ok(())
}
}
pub struct LeafIndexLookup<T: Fields> {
pub fields: T,
pub doc_id: DocId,
index_fields: HashMap<String, LeafIndexField<T>>,
#[allow(dead_code)]
num_docs: i32,
#[allow(dead_code)]
max_doc: i32,
#[allow(dead_code)]
num_deleted_docs: i32,
}
impl<T: Fields + Clone> LeafIndexLookup<T> {
pub fn new(fields: T) -> LeafIndexLookup<T> {
LeafIndexLookup {
fields,
doc_id: -1,
index_fields: HashMap::new(),
num_docs: -1,
max_doc: -1,
num_deleted_docs: -1,
}
}
pub fn set_document(&mut self, doc_id: DocId) -> Result<()> {
if self.doc_id == doc_id {
return Ok(());
}
// We assume that docs are processed in ascending order of id. If this
// is not the case, we would have to re-initialize all posting lists in
// IndexFieldTerm. TODO: | terms: HashMap<
String, | random_line_split |
indeploopinputparser.py | inputfile' : (str, 'mast.inp', 'Input file name'),\
}
class IndepLoopInputParser(MASTObj):
"""Scans an input file for "indeploop" keyword and copies it into
many input files.
Attributes:
self.indeploop <str>: flag for independent looping
self.loop_delim <str>: character for delimiting loops
self.loop_start <str>: character indicating start of loop
self.loop_end <str>: character indicating end of loop
self.baseinput <MASTFile>: MASTFile created from *.inp input file
self.pegloop1 <str>: flag for one pegged loop
self.pegloop2 <str>: flag for second pegged loop
Looping must be indicated at the beginning of the line, and the
text to be looped must be complete:
indeploop mast_kpoints (3x3x3 G, 5x5x5 G, 2x2x2 M, 4x4x4 M)
"""
def __init__(self, **kwargs):
MASTObj.__init__(self, ALLOWED_KEYS, **kwargs)
self.indeploop = "indeploop"
self.loop_delim = ","
self.loop_start = "("
self.loop_end = ")"
self.baseinput = MASTFile(self.keywords['inputfile'])
self.pegloop1 = "pegloop1"
self.pegloop2 = "pegloop2"
def main(self, verbose=0):
"""Scan for independent loops and set up dictionaries.
Return:
createdfiles <list of str>: list of created input files
"""
indepdict=self.scan_for_loop(self.indeploop)
pegdict1 = self.scan_for_loop(self.pegloop1)
pegdict2 = self.scan_for_loop(self.pegloop2)
if len(indepdict.keys()) == 0 and len(pegdict1.keys()) == 0 and len(pegdict2.keys()) == 0:
return dict()
alldict = dict(indepdict)
alldict.update(pegdict1)
alldict.update(pegdict2)
indepcomb=self.get_combo_list(indepdict, 0)
pegcomb1=self.get_combo_list(pegdict1, 1)
pegcomb2=self.get_combo_list(pegdict2, 1)
allcombs = self.combine_three_combo_lists(indepcomb, pegcomb1, pegcomb2)
datasets = self.prepare_looped_datasets(alldict, allcombs)
createdfiles = self.create_input_files(datasets)
if verbose == 1:
self.print_list(indepcomb)
self.print_list(pegcomb1)
self.print_list(pegcomb2)
self.print_list(allcombs)
for datakey in datasets:
self.print_list(datasets[datakey])
return createdfiles
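# Typical driver usage (illustrative; the real call sites live elsewhere in
# MAST): IndepLoopInputParser(inputfile='mast.inp').main(verbose=1) returns
# the list of generated input file names, or an empty dict if no loops exist.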
def scan_for_loop(self, loopkey):
"""Scan for loops.
Args:
loopkey <str>: string for searching for loops. This is
either "indeploop" or "pegloop"
Return:
loopdict <dict>: Dictionary where loop line indices in the file
are keys, with values
'looplist', containing a list of strings, and
'prepend', containing any text to prepend
'append', containing any text to append
e.g. loopdict[12]['looplist']=['3x3x3 G','5x5x5 G']
['prepend']='mast_kpoints '
['append']=''
"""
loopdict=dict() | realline=""
split1=""
split2=""
numlines = len(self.baseinput.data)
lidx =0
while lidx < numlines:
dataline = self.baseinput.data[lidx].strip()
if loopkey in dataline:
loopdict[lidx] = dict()
realline=dataline.split(' ',1)[1] #strip off indeploop
split1 = realline.split(self.loop_start)
split2 = split1[1].split(self.loop_end)
loopdict[lidx]['prepend'] = split1[0]
loopdict[lidx]['append'] = split2[1]
loopdict[lidx]['looplist'] = split2[0].split(self.loop_delim)
lidx = lidx + 1
#print "TTM DEBUG: ", loopdict
if len(loopdict.keys()) == 0:
return dict()
return loopdict
def get_combo_list(self, loopdict, pegged=0):
"""Prepare a combination list of looping indices.
Args:
loopdict <dict>: dictionary of looping items from
    scan_for_loop
pegged <int>: 0 (default) for independent loops,
    1 for pegged loops whose lines advance together
Returns:
combolist <list of str>: list of strings, like ["3-0","5-1"]
representing combinations
"""
combolist=list()
flatlists=list()
loopkeys = list(loopdict.keys())
loopkeys.sort()
if pegged == 0:
for loopkey in loopkeys:
numloop = len(loopdict[loopkey]['looplist'])
loopct=0
flatlist=list()
while loopct < numloop:
flatlist.append(str(loopkey) + '-' + str(loopct))
loopct = loopct + 1
flatlists.append(flatlist)
import itertools
prod_list = itertools.product(*flatlists)
stopiter = 0
while not stopiter:
try:
mycomb = prod_list.next()
except StopIteration:
stopiter = 1
if stopiter == 0:
combolist.append(list(mycomb))
elif pegged == 1:
if len(loopkeys) == 0:
return combolist #Empty list
numloop = len(loopdict[loopkeys[0]]['looplist']) #all same len
numct=0
while numct < numloop:
flatlist=list()
for loopkey in loopkeys:
flatlist.append(str(loopkey) + '-' + str(numct))
numct = numct + 1
combolist.append(flatlist)
#print "TTM DEBUG: ", flatlists
return combolist
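# Illustrative example (added for exposition; not part of the original file).
# For two independent loop lines, one with 2 options (file line 4) and one
# with 3 options (file line 9):
#   loopdict = {4: {'looplist': ['a', 'b'], ...},
#               9: {'looplist': ['x', 'y', 'z'], ...}}
#   get_combo_list(loopdict, 0) ->
#   [['4-0', '9-0'], ['4-0', '9-1'], ['4-0', '9-2'],
#    ['4-1', '9-0'], ['4-1', '9-1'], ['4-1', '9-2']]
# Each entry is "<line index>-<option index>"; pegged mode instead yields
# [['4-0', '9-0'], ['4-1', '9-1'], ...] with all lines advancing together.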
def combine_three_combo_lists(self, indeplist, peglist1, peglist2):
"""Combine two pegged lists and one independent list.
Args:
indeplist <list of list>: List of indeploop combinations.
peglist1 <list of list>: List of pegged loop 1 combinations
peglist2 <list of list>: List of pegged loop 2 combinations
Returns:
alllist <list of list>: List of all combinations
"""
templist=list()
threelist=list()
templist = self.combine_combo_lists(indeplist, peglist1)
threelist = self.combine_combo_lists(templist, peglist2)
return threelist
def combine_combo_lists(self, indeplist, peggedlist):
"""Combine combination lists.
Args:
indeplist <list of list>: List of indeploop combinations.
peggedlist <list of list>: List of pegged loop combinations
Returns:
alllist <list of list>: List of all combinations
"""
alllist=list()
if len(peggedlist) == 0:
return indeplist
if len(indeplist) == 0:
return peggedlist
for pegitem in peggedlist:
for indepitem in indeplist:
alllistentry = list(pegitem)
alllistentry.extend(indepitem)
alllist.append(alllistentry)
return alllist
def print_list(self, mylist):
"""Print a list.
Args:
mylist <list of str>
"""
for myitem in mylist:
print myitem
return
def prepare_looped_lines(self, alldict, comblist):
"""Prepare looped lines from looping dictionary.
Args:
alldict <dict>: merged dictionary of looping items from
    scan_for_loop
comblist <list of str>: one combination of loop indices,
    e.g. ["3-0", "5-1"]
Returns:
loopline_dict <dict of dict>: dictionary of different lines for
input files, with keys being
the looped line index
"""
loopline_dict=dict()
for stridx in comblist:
lidx = int(stridx.split('-')[0])
loopidx = int(stridx.split('-')[1])
loopline_dict[lidx] = alldict[lidx]['prepend'] + alldict[lidx]['looplist'][loopidx].strip() + alldict[lidx]['append'] + '\n'
return loopline_dict
def prepare_looped_datasets(self, alldict, allcombs):
"""Prepare looped datasets from looping lines.
Args:
alldict <dict>: line dictionary for looping
allcombs <list of list>: index combinations for looping
Returns:
datasets_dict <dict of list>: full datasets for new input files
"""
datasets_dict=dict()
numcombs = len(allcombs)
combct = 0
while combct < numcombs:
newdata = list(self.baseinput.data)
loopedlines = dict()
loopedlines = self.prepare_looped_lines(alldict, allcombs[combct])
for l | random_line_split |
|
indeploopinputparser.py | inputfile' : (str, 'mast.inp', 'Input file name'),\
}
class IndepLoopInputParser(MASTObj):
"""Scans an input file for "indeploop" keyword and copies it into
many input files.
Attributes:
self.indeploop <str>: flag for independent looping
self.loop_delim <str>: character for delimiting loops
self.loop_start <str>: character indicating start of loop
self.loop_end <str>: character indicating end of loop
self.baseinput <MASTFile>: MASTFile created from *.inp input file
self.pegloop1 <str>: flag for one pegged loop
self.pegloop2 <str>: flag for second pegged loop
Looping must be indicated at the beginning of the line, and the
text to be looped must be complete:
indeploop mast_kpoints (3x3x3 G, 5x5x5 G, 2x2x2 M, 4x4x4 M)
"""
def __init__(self, **kwargs):
MASTObj.__init__(self, ALLOWED_KEYS, **kwargs)
self.indeploop = "indeploop"
self.loop_delim = ","
self.loop_start = "("
self.loop_end = ")"
self.baseinput = MASTFile(self.keywords['inputfile'])
self.pegloop1 = "pegloop1"
self.pegloop2 = "pegloop2"
def main(self, verbose=0):
"""Scan for independent loops and set up dictionaries.
Return:
createdfiles <list of str>: list of created input files
"""
indepdict=self.scan_for_loop(self.indeploop)
pegdict1 = self.scan_for_loop(self.pegloop1)
pegdict2 = self.scan_for_loop(self.pegloop2)
if len(indepdict.keys()) == 0 and len(pegdict1.keys()) == 0 and len(pegdict2.keys()) == 0:
return dict()
alldict = dict(indepdict)
alldict.update(pegdict1)
alldict.update(pegdict2)
indepcomb=self.get_combo_list(indepdict, 0)
pegcomb1=self.get_combo_list(pegdict1, 1)
pegcomb2=self.get_combo_list(pegdict2, 1)
allcombs = self.combine_three_combo_lists(indepcomb, pegcomb1, pegcomb2)
datasets = self.prepare_looped_datasets(alldict, allcombs)
createdfiles = self.create_input_files(datasets)
if verbose == 1:
self.print_list(indepcomb)
self.print_list(pegcomb1)
self.print_list(pegcomb2)
self.print_list(allcombs)
for datakey in datasets:
self.print_list(datasets[datakey])
return createdfiles
def scan_for_loop(self, loopkey):
| while lidx < numlines:
dataline = self.baseinput.data[lidx].strip()
if loopkey in dataline:
loopdict[lidx] = dict()
realline=dataline.split(' ',1)[1] #strip off indeploop
split1 = realline.split(self.loop_start)
split2 = split1[1].split(self.loop_end)
loopdict[lidx]['prepend'] = split1[0]
loopdict[lidx]['append'] = split2[1]
loopdict[lidx]['looplist'] = split2[0].split(self.loop_delim)
lidx = lidx + 1
#print "TTM DEBUG: ", loopdict
if len(loopdict.keys()) == 0:
return dict()
return loopdict
def get_combo_list(self, loopdict, pegged=0):
"""Prepare a combination list of looping indices.
Args:
loopdict <dict>: dictionary of looping items from
    scan_for_loop
pegged <int>: 0 (default) for independent loops,
    1 for pegged loops whose lines advance together
Returns:
combolist <list of str>: list of strings, like ["3-0","5-1"]
representing combinations
"""
combolist=list()
flatlists=list()
loopkeys = list(loopdict.keys())
loopkeys.sort()
if pegged == 0:
for loopkey in loopkeys:
numloop = len(loopdict[loopkey]['looplist'])
loopct=0
flatlist=list()
while loopct < numloop:
flatlist.append(str(loopkey) + '-' + str(loopct))
loopct = loopct + 1
flatlists.append(flatlist)
import itertools
prod_list = itertools.product(*flatlists)
stopiter = 0
while not stopiter:
try:
mycomb = prod_list.next()
except StopIteration:
stopiter = 1
if stopiter == 0:
combolist.append(list(mycomb))
elif pegged == 1:
if len(loopkeys) == 0:
return combolist #Empty list
numloop = len(loopdict[loopkeys[0]]['looplist']) #all same len
numct=0
while numct < numloop:
flatlist=list()
for loopkey in loopkeys:
flatlist.append(str(loopkey) + '-' + str(numct))
numct = numct + 1
combolist.append(flatlist)
#print "TTM DEBUG: ", flatlists
return combolist
def combine_three_combo_lists(self, indeplist, peglist1, peglist2):
"""Combine two pegged lists and one independent list.
Args:
indeplist <list of list>: List of indeploop combinations.
peglist1 <list of list>: List of pegged loop 1 combinations
peglist2 <list of list>: List of pegged loop 2 combinations
Returns:
alllist <list of list>: List of all combinations
"""
templist=list()
threelist=list()
templist = self.combine_combo_lists(indeplist, peglist1)
threelist = self.combine_combo_lists(templist, peglist2)
return threelist
def combine_combo_lists(self, indeplist, peggedlist):
"""Combine combination lists.
Args:
indeplist <list of list>: List of indeploop combinations.
peggedlist <list of list>: List of pegged loop combinations
Returns:
alllist <list of list>: List of all combinations
"""
alllist=list()
if len(peggedlist) == 0:
return indeplist
if len(indeplist) == 0:
return peggedlist
for pegitem in peggedlist:
for indepitem in indeplist:
alllistentry = list(pegitem)
alllistentry.extend(indepitem)
alllist.append(alllistentry)
return alllist
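# Illustrative example (added for exposition; not part of the original file):
# pegged combinations vary together and are crossed with independent ones.
#   indeplist  = [['4-0'], ['4-1']]
#   peggedlist = [['7-0', '9-0'], ['7-1', '9-1']]
#   combine_combo_lists(indeplist, peggedlist) ->
#   [['7-0', '9-0', '4-0'], ['7-0', '9-0', '4-1'],
#    ['7-1', '9-1', '4-0'], ['7-1', '9-1', '4-1']]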
def print_list(self, mylist):
"""Print a list.
Args:
mylist <list of str>
"""
for myitem in mylist:
print myitem
return
def prepare_looped_lines(self, alldict, comblist):
"""Prepare looped lines from looping dictionary.
Args:
alldict <dict>: merged dictionary of looping items from
    scan_for_loop
comblist <list of str>: one combination of loop indices,
    e.g. ["3-0", "5-1"]
Returns:
loopline_dict <dict of dict>: dictionary of different lines for
input files, with keys being
the looped line index
"""
loopline_dict=dict()
for stridx in comblist:
lidx = int(stridx.split('-')[0])
loopidx = int(stridx.split('-')[1])
loopline_dict[lidx] = alldict[lidx]['prepend'] + alldict[lidx]['looplist'][loopidx].strip() + alldict[lidx]['append'] + '\n'
return loopline_dict
def prepare_looped_datasets(self, alldict, allcombs):
"""Prepare looped datasets from looping lines.
Args:
alldict <dict>: line dictionary for looping
allcombs <list of list>: index combinations for looping
Returns:
datasets_dict <dict of list>: full datasets for new input files
"""
datasets_dict=dict()
numcombs = len(allcombs)
combct = 0
while combct < numcombs:
newdata = list(self.baseinput.data)
loopedlines = dict()
loopedlines = self.prepare_looped_lines(alldict, allcombs[combct])
for lvalid | """Scan for loops.
Args:
loopkey <str>: string for searching for loops. This is
either "indeploop" or "pegloop"
Return:
loopdict <dict>: Dictionary where loop line indices in the file
are keys, with values
'looplist', containing a list of strings, and
'prepend', containing any text to prepend
'append', containing any text to append
e.g. loopdict[12]['looplist']=['3x3x3 G','5x5x5 G']
['prepend']='mast_kpoints '
['append']=''
"""
loopdict=dict()
realline=""
split1=""
split2=""
numlines = len(self.baseinput.data)
lidx =0 | identifier_body |
indeploopinputparser.py | inputfile' : (str, 'mast.inp', 'Input file name'),\
}
class IndepLoopInputParser(MASTObj):
"""Scans an input file for "indeploop" keyword and copies it into
many input files.
Attributes:
self.indeploop <str>: flag for independent looping
self.loop_delim <str>: character for delimiting loops
self.loop_start <str>: character indicating start of loop
self.loop_end <str>: character indicating end of loop
self.baseinput <MASTFile>: MASTFile created from *.inp input file
self.pegloop1 <str>: flag for one pegged loop
self.pegloop2 <str>: flag for second pegged loop
Looping must be indicated at the beginning of the line, and the
text to be looped must be complete:
indeploop mast_kpoints (3x3x3 G, 5x5x5 G, 2x2x2 M, 4x4x4 M)
"""
def __init__(self, **kwargs):
MASTObj.__init__(self, ALLOWED_KEYS, **kwargs)
self.indeploop = "indeploop"
self.loop_delim = ","
self.loop_start = "("
self.loop_end = ")"
self.baseinput = MASTFile(self.keywords['inputfile'])
self.pegloop1 = "pegloop1"
self.pegloop2 = "pegloop2"
def main(self, verbose=0):
"""Scan for independent loops and set up dictionaries.
Return:
createdfiles <list of str>: list of created input files
"""
indepdict=self.scan_for_loop(self.indeploop)
pegdict1 = self.scan_for_loop(self.pegloop1)
pegdict2 = self.scan_for_loop(self.pegloop2)
if len(indepdict.keys()) == 0 and len(pegdict1.keys()) == 0 and len(pegdict2.keys()) == 0:
return dict()
alldict = dict(indepdict)
alldict.update(pegdict1)
alldict.update(pegdict2)
indepcomb=self.get_combo_list(indepdict, 0)
pegcomb1=self.get_combo_list(pegdict1, 1)
pegcomb2=self.get_combo_list(pegdict2, 1)
allcombs = self.combine_three_combo_lists(indepcomb, pegcomb1, pegcomb2)
datasets = self.prepare_looped_datasets(alldict, allcombs)
createdfiles = self.create_input_files(datasets)
if verbose == 1:
self.print_list(indepcomb)
self.print_list(pegcomb1)
self.print_list(pegcomb2)
self.print_list(allcombs)
for datakey in datasets:
self.print_list(datasets[datakey])
return createdfiles
def scan_for_loop(self, loopkey):
"""Scan for loops.
Args:
loopkey <str>: string for searching for loops. This is
either "indeploop" or "pegloop"
Return:
loopdict <dict>: Dictionary where loop line indices in the file
are keys, with values
'looplist', containing a list of strings, and
'prepend', containing any text to prepend
'append', containing any text to append
e.g. loopdict[12]['looplist']=['3x3x3 G','5x5x5 G']
['prepend']='mast_kpoints '
['append']=''
"""
loopdict=dict()
realline=""
split1=""
split2=""
numlines = len(self.baseinput.data)
lidx =0
while lidx < numlines:
dataline = self.baseinput.data[lidx].strip()
if loopkey in dataline:
loopdict[lidx] = dict()
realline=dataline.split(' ',1)[1] #strip off indeploop
split1 = realline.split(self.loop_start)
split2 = split1[1].split(self.loop_end)
loopdict[lidx]['prepend'] = split1[0]
loopdict[lidx]['append'] = split2[1]
loopdict[lidx]['looplist'] = split2[0].split(self.loop_delim)
lidx = lidx + 1
#print "TTM DEBUG: ", loopdict
if len(loopdict.keys()) == 0:
return dict()
return loopdict
def get_combo_list(self, loopdict, pegged=0):
"""Prepare a combination list of looping indices.
Args:
loopdict <dict>: dictionary of looping items from
scan_for_loop
Returns:
combolist <list of str>: list of strings, like ["3-0","5-1"]
representing combinations
"""
combolist=list()
flatlists=list()
loopkeys = list(loopdict.keys())
loopkeys.sort()
if pegged == 0:
for loopkey in loopkeys:
numloop = len(loopdict[loopkey]['looplist'])
loopct=0
flatlist=list()
while loopct < numloop:
flatlist.append(str(loopkey) + '-' + str(loopct))
loopct = loopct + 1
flatlists.append(flatlist)
import itertools
prod_list = itertools.product(*flatlists)
stopiter = 0
while not stopiter:
|
elif pegged == 1:
if len(loopkeys) == 0:
return combolist #Empty list
numloop = len(loopdict[loopkeys[0]]['looplist']) #all same len
numct=0
while numct < numloop:
flatlist=list()
for loopkey in loopkeys:
flatlist.append(str(loopkey) + '-' + str(numct))
numct = numct + 1
combolist.append(flatlist)
#print "TTM DEBUG: ", flatlists
return combolist
def combine_three_combo_lists(self, indeplist, peglist1, peglist2):
"""Combine two pegged lists and one independent list.
Args:
indeplist <list of list>: List of indeploop combinations.
peglist1 <list of list>: List of pegged loop 1 combinations
peglist2 <list of list>: List of pegged loop 2 combinations
Returns:
alllist <list of list>: List of all combinations
"""
templist=list()
threelist=list()
templist = self.combine_combo_lists(indeplist, peglist1)
threelist = self.combine_combo_lists(templist, peglist2)
return threelist
def combine_combo_lists(self, indeplist, peggedlist):
"""Combine combination lists.
Args:
indeplist <list of list>: List of indeploop combinations.
peggedlist <list of list>: List of pegged loop combinations
Returns:
alllist <list of list>: List of all combinations
"""
alllist=list()
if len(peggedlist) == 0:
return indeplist
if len(indeplist) == 0:
return peggedlist
for pegitem in peggedlist:
for indepitem in indeplist:
alllistentry = list(pegitem)
alllistentry.extend(indepitem)
alllist.append(alllistentry)
return alllist
def print_list(self, mylist):
"""Print a list.
Args:
mylist <list of str>
"""
for myitem in mylist:
print myitem
return
def prepare_looped_lines(self, alldict, comblist):
"""Prepare looped lines from looping dictionary.
Args:
alldict <dict>: line dictionary for looping
comblist <list of str>: single loop-index combination, like ["3-0","5-1"]
Returns:
loopline_dict <dict of dict>: dictionary of different lines for
input files, with keys being
the looped line index
"""
loopline_dict=dict()
for stridx in comblist:
lidx = int(stridx.split('-')[0])
loopidx = int(stridx.split('-')[1])
loopline_dict[lidx] = alldict[lidx]['prepend'] + alldict[lidx]['looplist'][loopidx].strip() + alldict[lidx]['append'] + '\n'
return loopline_dict
def prepare_looped_datasets(self, alldict, allcombs):
"""Prepare looped datasets from looping lines.
Args:
alldict <dict>: line dictionary for looping
allcombs <list of list>: index combinations for looping
Returns:
datasets_dict <dict of list>: full datasets for new input files
"""
datasets_dict=dict()
numcombs = len(allcombs)
combct = 0
while combct < numcombs:
newdata = list(self.baseinput.data)
loopedlines = dict()
loopedlines = self.prepare_looped_lines(alldict, allcombs[combct])
for | try:
mycomb = prod_list.next()
except StopIteration:
stopiter = 1
if stopiter == 0:
combolist.append(list(mycomb)) | conditional_block |
indeploopinputparser.py | inputfile' : (str, 'mast.inp', 'Input file name'),\
}
class IndepLoopInputParser(MASTObj):
"""Scans an input file for "indeploop" keyword and copies it into
many input files.
Attributes:
self.indeploop <str>: flag for independent looping
self.loop_delim <str>: character for delimiting loops
self.loop_start <str>: character indicating start of loop
self.loop_end <str>: character indicating end of loop
self.baseinput <MASTFile>: MASTFile created from *.inp input file
self.pegloop1 <str>: flag for one pegged loop
self.pegloop2 <str>: flag for second pegged loop
Looping must be indicated at the beginning of the line, and the
text to be looped must be complete:
indeploop mast_kpoints (3x3x3 G, 5x5x5 G, 2x2x2 M, 4x4x4 M)
"""
def __init__(self, **kwargs):
MASTObj.__init__(self, ALLOWED_KEYS, **kwargs)
self.indeploop = "indeploop"
self.loop_delim = ","
self.loop_start = "("
self.loop_end = ")"
self.baseinput = MASTFile(self.keywords['inputfile'])
self.pegloop1 = "pegloop1"
self.pegloop2 = "pegloop2"
def main(self, verbose=0):
"""Scan for independent loops and set up dictionaries.
Return:
createdfiles <list of str>: list of created input files
"""
indepdict=self.scan_for_loop(self.indeploop)
pegdict1 = self.scan_for_loop(self.pegloop1)
pegdict2 = self.scan_for_loop(self.pegloop2)
if len(indepdict.keys()) == 0 and len(pegdict1.keys()) == 0 and len(pegdict2.keys()) == 0:
return dict()
alldict = dict(indepdict)
alldict.update(pegdict1)
alldict.update(pegdict2)
indepcomb=self.get_combo_list(indepdict, 0)
pegcomb1=self.get_combo_list(pegdict1, 1)
pegcomb2=self.get_combo_list(pegdict2, 1)
allcombs = self.combine_three_combo_lists(indepcomb, pegcomb1, pegcomb2)
datasets = self.prepare_looped_datasets(alldict, allcombs)
createdfiles = self.create_input_files(datasets)
if verbose == 1:
self.print_list(indepcomb)
self.print_list(pegcomb1)
self.print_list(pegcomb2)
self.print_list(allcombs)
for datakey in datasets:
self.print_list(datasets[datakey])
return createdfiles
def scan_for_loop(self, loopkey):
"""Scan for loops.
Args:
loopkey <str>: string for searching for loops. This is
either "indeploop" or "pegloop"
Return:
loopdict <dict>: Dictionary where loop line indices in the file
are keys, with values
'looplist', containing a list of strings, and
'prepend', containing any text to prepend
'append', containing any text to append
e.g. loopdict[12]['looplist']=['3x3x3 G','5x5x5 G']
['prepend']='mast_kpoints '
['append']=''
"""
loopdict=dict()
realline=""
split1=""
split2=""
numlines = len(self.baseinput.data)
lidx =0
while lidx < numlines:
dataline = self.baseinput.data[lidx].strip()
if loopkey in dataline:
loopdict[lidx] = dict()
realline=dataline.split(' ',1)[1] #strip off indeploop
split1 = realline.split(self.loop_start)
split2 = split1[1].split(self.loop_end)
loopdict[lidx]['prepend'] = split1[0]
loopdict[lidx]['append'] = split2[1]
loopdict[lidx]['looplist'] = split2[0].split(self.loop_delim)
lidx = lidx + 1
#print "TTM DEBUG: ", loopdict
if len(loopdict.keys()) == 0:
return dict()
return loopdict
def get_combo_list(self, loopdict, pegged=0):
"""Prepare a combination list of looping indices.
Args:
loopdict <dict>: dictionary of looping items from
scan_for_loop
Returns:
combolist <list of str>: list of strings, like ["3-0","5-1"]
representing combinations
"""
combolist=list()
flatlists=list()
loopkeys = list(loopdict.keys())
loopkeys.sort()
if pegged == 0:
for loopkey in loopkeys:
numloop = len(loopdict[loopkey]['looplist'])
loopct=0
flatlist=list()
while loopct < numloop:
flatlist.append(str(loopkey) + '-' + str(loopct))
loopct = loopct + 1
flatlists.append(flatlist)
import itertools
prod_list = itertools.product(*flatlists)
stopiter = 0
while not stopiter:
try:
mycomb = prod_list.next()
except StopIteration:
stopiter = 1
if stopiter == 0:
combolist.append(list(mycomb))
elif pegged == 1:
if len(loopkeys) == 0:
return combolist #Empty list
numloop = len(loopdict[loopkeys[0]]['looplist']) #all same len
numct=0
while numct < numloop:
flatlist=list()
for loopkey in loopkeys:
flatlist.append(str(loopkey) + '-' + str(numct))
numct = numct + 1
combolist.append(flatlist)
#print "TTM DEBUG: ", flatlists
return combolist
def combine_three_combo_lists(self, indeplist, peglist1, peglist2):
"""Combine two pegged lists and one independent list.
Args:
indeplist <list of list>: List of indeploop combinations.
peglist1 <list of list>: List of pegged loop 1 combinations
peglist2 <list of list>: List of pegged loop 2 combinations
Returns:
alllist <list of list>: List of all combinations
"""
templist=list()
threelist=list()
templist = self.combine_combo_lists(indeplist, peglist1)
threelist = self.combine_combo_lists(templist, peglist2)
return threelist
def combine_combo_lists(self, indeplist, peggedlist):
"""Combine combination lists.
Args:
indeplist <list of list>: List of indeploop combinations.
peggedlist <list of list>: List of pegged loop combinations
Returns:
alllist <list of list>: List of all combinations
"""
alllist=list()
if len(peggedlist) == 0:
return indeplist
if len(indeplist) == 0:
return peggedlist
for pegitem in peggedlist:
for indepitem in indeplist:
alllistentry = list(pegitem)
alllistentry.extend(indepitem)
alllist.append(alllistentry)
return alllist
def print_list(self, mylist):
"""Print a list.
Args:
mylist <list of str>
"""
for myitem in mylist:
print myitem
return
def | (self, alldict, comblist):
"""Prepare looped lines from looping dictionary.
Args:
alldict <dict>: line dictionary for looping
comblist <list of str>: single loop-index combination, like ["3-0","5-1"]
Returns:
loopline_dict <dict of dict>: dictionary of different lines for
input files, with keys being
the looped line index
"""
loopline_dict=dict()
for stridx in comblist:
lidx = int(stridx.split('-')[0])
loopidx = int(stridx.split('-')[1])
loopline_dict[lidx] = alldict[lidx]['prepend'] + alldict[lidx]['looplist'][loopidx].strip() + alldict[lidx]['append'] + '\n'
return loopline_dict
def prepare_looped_datasets(self, alldict, allcombs):
"""Prepare looped datasets from looping lines.
Args:
alldict <dict>: line dictionary for looping
allcombs <list of list>: index combinations for looping
Returns:
datasets_dict <dict of list>: full datasets for new input files
"""
datasets_dict=dict()
numcombs = len(allcombs)
combct = 0
while combct < numcombs:
newdata = list(self.baseinput.data)
loopedlines = dict()
loopedlines = self.prepare_looped_lines(alldict, allcombs[combct])
for | prepare_looped_lines | identifier_name |
concepts.ts | atua no desenvolvimento de aplicativos ou sistemas, programando nativamente ou por meio de outras linguagens, para dispositivos móveis.`,
},
{
key: `SQL`,
value: `SQL é uma linguagem declarativa de sintaxe relativamente simples, voltada a bancos de dados relacionais`,
},
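// Editor's note: an illustrative query for the SQL entry above; the table and
// column names are hypothetical and not part of the original glossary:
//   SELECT nome, email FROM usuarios WHERE ativo = 1 ORDER BY nome;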
{
key: `API - Interface de programação de aplicações`,
value: `As APIs são um conjunto de padrões que fazem parte de uma interface e que permitem a criação de plataformas de maneira mais simples e prática para desenvolvedores. A partir de APIs é possível criar softwares, aplicativos, programas e plataformas diversas. Por exemplo, apps desenvolvidos para celulares Android e iPhone (iOS) são criados a partir de padrões definidos e disponibilizados pelas APIs de cada sistema operacional.`,
},
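// Editor's note: a minimal sketch of calling an HTTP API from TypeScript, assuming
// an async context and a placeholder URL (not part of the original glossary):
//   const resposta = await fetch('https://api.example.com/usuarios/1');
//   const usuario = await resposta.json();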
{
key: `Estrutura de seleção / Estrutura de decisão ( if / se )`,
value: `Estrutura de seleção é, na ciência da computação, uma estrutura de desvio do fluxo de controle presente em linguagens de programação que realiza diferentes computações ou ações dependendo se a seleção é verdadeira ou falsa, em que a expressão é processada e transformada em um valor booleano.`,
},
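// Editor's note: a minimal TypeScript sketch of the selection structure described
// above (the values are hypothetical):
//   const idade = 20;
//   if (idade >= 18) {
//     console.log('maior de idade');
//   } else {
//     console.log('menor de idade');
//   }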
{
key: `switch switch case`,
value: `Nas linguagens de programação de computador, uma instrução switch é um tipo de mecanismo de controle de seleção usado para permitir que o valor de uma variável ou expressão mude o fluxo de controle da execução do programa via pesquisa e mapa.`,
},
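// Editor's note: an illustrative switch statement (the `diaDaSemana` variable is assumed):
//   const diaDaSemana = 6;
//   switch (diaDaSemana) {
//     case 0: console.log('domingo'); break;
//     case 6: console.log('sábado'); break;
//     default: console.log('dia útil');
//   }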
{
key: `else`,
value: `A condição else serve como um caminho alternativo do if. Ou seja, o else vai ser executado se a condição sendo verificada no if for falsa.`,
},
{
key: `REST`,
value: `Representational State Transfer, em português Transferência Representacional de Estado, é um estilo de arquitetura de software que define um conjunto de restrições a serem usadas para a criação de web services.`,
},
{
key: `HTML`,
value: `HTML abreviação para a expressão inglesa HyperText Markup Language, que significa: "Linguagem de Marcação de Hipertexto" é uma linguagem de marcação utilizada na construção de páginas na Web. Documentos HTML podem ser interpretados por navegadores. A tecnologia é fruto da junção entre os padrões HyTime e SGML.`,
},
{
key: `Banco de Dados`,
value: `Bancos de dados ou bases de dados são conjuntos de arquivos relacionados entre si com registros sobre pessoas, lugares ou coisas. São coleções organizadas de dados que se relacionam de forma a criar algum sentido e dar mais eficiência durante uma pesquisa ou estudo científico.`,
}, | {
key: `Linguagem de Programação`,
value: `A linguagem de programação é um método padronizado, formado por um conjunto de regras sintáticas e semânticas, de implementação de um código fonte - que pode ser compilado e transformado em um programa de computador, ou usado como script interpretado - que informará instruções de processamento ao computador.`,
},
{
key: `Estrutura de repetição ( loop / for / while / laço / laço de repetição / foreach)`,
value: `Na maioria das linguagens de programação de computadores, um loop é uma instrução de fluxo de controle que permite que o código seja executado repetidamente com base em uma determinada condição booleana. O loop pode ser considerado uma declaração if de repetição. Existe também o foreach, em português para cada, é uma expressão idiomática de linguagem de computador para travessia de itens em um coleção. Foreach geralmente é usada em lugar de uma declaração for padrão.`,
},
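// Editor's note: illustrative repetition structures in TypeScript (hypothetical data):
//   for (let i = 0; i < 3; i++) { console.log(i); }       // for
//   let n = 0; while (n < 3) { n++; }                     // while
//   ['a', 'b', 'c'].forEach(item => console.log(item));   // foreach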
{
key: `Array ( Vetor / Arranjo )`,
value: `Em programação de computadores, um arranjo (array) é uma estrutura de dados que armazena uma coleção de elementos de tal forma que cada um dos elementos possa ser identificado por, pelo menos, um índice ou uma chave.`,
},
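// Editor's note: illustrative array usage (hypothetical values):
//   const notas: number[] = [7.5, 8.0, 9.2];
//   console.log(notas[0]); // elements are accessed by a zero-based index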
{
key: `Código Fonte`,
value: `Criado em um editor de textos, contendo os comandos da linguagem de programação. Serve como entrada para o compilador.`,
},
{
key: `Código Objeto`,
value: `Criado pela conversão do código-fonte em linguagem de máquina. É gerado pelo compilador. Só é criado quando não há erros no código-fonte.`,
},
{
key: `Operadores aritméticos`,
value: `São aqueles que estudamos na escola, aquelas funções básicas de somar, subtrair, multiplicar, dividir, por exemplo.`,
},
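// Editor's note: illustrative arithmetic operators (hypothetical values):
//   const soma = 2 + 3;    // 5
//   const produto = 4 * 2; // 8
//   const resto = 7 % 2;   // 1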
{
key: `Operadores de relação`,
value: `Operadores relacionais são utilizados para comparar valores, o resultado de uma expressão relacional é um valor booleano (verdadeiro ou falso). Os operadores relacionais são: igual, diferente, maior, menor, maior ou igual, menor ou igual.`,
},
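// Editor's note: illustrative relational operators; each expression yields a boolean:
//   5 > 3   // true
//   5 === 3 // false
//   5 !== 3 // true
//   5 <= 5  // true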
{
key: `Operadores Lógicos`,
value: `Os operadores lógicos são usados para representar situações lógicas que não podem ser representadas por operadores aritméticos. Também são chamados conectivos lógicos por unirem duas expressões simples numa composta. Podem ser operadores binários, que operam em duas sentenças ou expressões, ou unário que opera numa sentença só.`,
},
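// Editor's note: illustrative logical operators (the values are hypothetical):
//   const nota = 7, faltas = 2;
//   const aprovado = nota >= 6 && faltas < 10; // AND (binary)
//   const revisar = nota < 6 || faltas >= 10;  // OR (binary)
//   const reprovado = !aprovado;               // NOT (unary)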
{
key: `Back-end backend back end`,
value: `Programador que trabalha com a lógica da aplicação, armazenamento e segurança de todos os dados gerados.`,
},
{
key: `Backup`,
value: `Cópia de dados de um dispositivo de armazenamento a outro para evitar perda dos dados originais.`,
},
{
key: `Bit`,
value: `É a menor unidade de medida de dados que pode ser armazenada ou transmitida no universo computacional. Um bit tem um único valor, zero ou um, com valor de verdadeiro ou de falso.`,
},
{
key: `Branch`,
value: `Ramificações de um repositório (ver significado). Cada branch tem a mesma base, mas podem ter alterações diferentes entre si. É comum que o objetivo final em um projeto seja que todas as branches sejam unidas a uma branch principal.`,
},
{
key: `Bug`,
value: `Erro no código que pode estar causando um comportamento indesejado na aplicação.`,
},
{
key: `Cache`,
value: `É uma memória pequena, porém muito rápida. Um espaço de armazenamento dedicado a guardar informações que são utilizadas com frequência. É comum que alguns sites utilizem os navegadores para guardar informações em cache.`,
},
{
key: `Cluster`,
value: `Arquitetura de sistema capaz combinar vários computadores para trabalharem em conjunto para que, em muitos aspectos, eles possam ser vistos como um único sistema.`,
},
{
key: `Cookies`,
value: `Pequenos arquivos enviados por um site para o navegador do usuário, que ficam armazenados no computador. Eles são feitos para guardar dados específicos de um cliente ou website, para serem acessados futuramente pelo servidor web de maneira mais rápida.`,
},
{
key: `Framework`,
value: `Se trata de uma estrutura base, uma espécie de plataforma de desenvolvimento, que contém ferramentas, guias, sistemas e componentes que agilizem o processo de desenvolvimento de soluções, auxiliando os especialistas em seus trabalhos.`,
},
{
key: `Git`,
value: `Sistema de controle de versão de arquivos. Nele é possível que diversas pessoas contribuam simultaneamente editando e criando novos arquivos sem o risco que as alterações sejam sobrescritas.`,
},
{
key: `Hardware`,
value: `Refere-se a objetos nos quais você pode realmente tocar, como discos, unidades de disco, telas, teclados, impressoras, placas e chips. Hardware é todo componente físico, interno ou externo do seu computador ou outro dispositivo, que determina do que um dispositivo é capaz e como você pode usá-lo. Embora dependa de um software para funcionar (e vice-versa), o hardware é um elemento a parte e igualmente importante.`,
},
{
key: | {
key: `DevOps`,
value: `Na Ciência da Computação o DevOps, é uma cultura na engenharia de software que aproxima os desenvolvedores de software e os operadores do software / administradores do sistema, com característica `,
}, | random_line_split |
b-button.ts | nearest containing form, if any.
*
* The form prop lets you place a component anywhere in the document but have it included with a form elsewhere
* in the document.
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefform
*
* @example
* ```
* < b-input :name = 'fname' | :form = 'my-form'
*
* < b-button type = 'submit' | :form = 'my-form'
* Submit
*
* < form id = my-form
* ```
*/
@prop({type: String, required: false})
readonly form?: string;
/** @see [[iAccess.autofocus]] */
@prop({type: Boolean, required: false})
readonly autofocus?: boolean;
/** @see [[iAccess.tabIndex]] */
@prop({type: Number, required: false})
readonly tabIndex?: number;
/**
* Icon to show before the button text
*
* @example
* ```
* < b-button :preIcon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIcon?: string;
/**
* Name of the used component to show `preIcon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :preIconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIconComponent?: string;
/**
* Icon to show after the button text
*
* @example
* ```
* < b-button :icon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly icon?: string;
/**
* Name of the used component to show `icon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :iconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly iconComponent?: string;
/**
* A component to show the "in-progress" state, or
* a boolean if the progress should be shown via the slot or `b-progress-icon`
*
* @default `'b-progress-icon'`
* @example
* ```
* < b-button :progressIcon = 'b-my-progress-icon'
* Submit
* ```
*/
@prop({type: [String, Boolean], required: false})
readonly progressIcon?: string | boolean;
/**
* Tooltip text to show when hovering over the component
*
* @example
* ```
* < b-button :hint = 'Click on me!!!'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hint?: string;
/**
* Tooltip position to use when hovering over the component
*
* @see [[gHint]]
* @example
* ```
* < b-button :hint = 'Click on me!!!' | :hintPos = 'bottom-right'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hintPos?: HintPosition;
/**
* The way to show the dropdown if the `dropdown` slot is provided
* @see [[gHint]]
*
* @example
* ```
* < b-button :dropdown = 'bottom-right'
* < template #default
* Submit
*
* < template #dropdown
* Additional information...
* ```
*/
@prop(String)
readonly dropdown: string = 'bottom';
/**
* Initial additional attributes are provided to an "internal" (native) button tag
* @see [[bButton.$refs.button]]
*/
@prop({type: Object, required: false})
readonly attrsProp?: Dictionary;
/**
* Additional attributes are provided to an "internal" (native) button tag
*
* @see [[bButton.attrsProp]]
* @see [[bButton.$refs.button]]
*/
get attrs(): Dictionary {
const
attrs = {...this.attrsProp};
if (this.type === 'link') {
attrs.href = this.href;
} else {
attrs.type = this.type;
attrs.form = this.form;
}
if (this.hasDropdown) {
attrs['aria-controls'] = this.dom.getId('dropdown');
attrs['aria-expanded'] = this.mods.opened;
}
return attrs;
}
/** @see [[iAccess.isFocused]] */
@computed({dependencies: ['mods.focused']})
get isFocused(): boolean {
const
{button} = this.$refs;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (button != null) {
return document.activeElement === button;
}
return iAccess.isFocused(this);
}
/**
* List of selected files (works with the `file` type)
*/
get files(): CanUndef<FileList> {
return this.$refs.file?.files ?? undefined;
}
/**
* True if the component has a dropdown area
*/
get hasDropdown(): boolean {
return Boolean(
this.vdom.getSlot('dropdown') && (
this.isFunctional ||
this.opt.ifOnce('opened', this.m.opened !== 'false') > 0 && delete this.watchModsStore.opened
)
);
}
static override readonly mods: ModsDecl = {
...iAccess.mods,
...iVisible.mods,
...iWidth.mods,
...iSize.mods,
opened: [
...iOpenToggle.mods.opened ?? [],
['false']
],
upper: [
'true',
'false'
]
};
protected override readonly $refs!: {
button: HTMLButtonElement;
file?: HTMLInputElement;
dropdown?: Element;
};
/**
* If the `type` prop is set to `file`, resets the file input
*/
@wait('ready')
reset(): CanPromise<void> {
const
{file} = this.$refs;
if (file != null) {
file.value = '';
}
}
/** @see [[iOpenToggle.initCloseHelpers]] */
@p({hook: 'beforeDataCreate', replace: false})
protected initCloseHelpers(events?: CloseHelperEvents): void {
iOpenToggle.initCloseHelpers(this, events);
}
protected override initModEvents(): void {
const
{localEmitter: $e} = this;
super.initModEvents();
iProgress.initModEvents(this);
iAccess.initModEvents(this);
iOpenToggle.initModEvents(this);
iVisible.initModEvents(this);
$e.on('block.mod.*.opened.*', (e: ModEvent) => this.waitStatus('ready', () => {
const expanded = e.value !== 'false' && e.type !== 'remove';
this.$refs.button.setAttribute('aria-expanded', String(expanded));
}));
$e.on('block.mod.*.disabled.*', (e: ModEvent) => this.waitStatus('ready', () => {
const {
button,
file
} = this.$refs;
const disabled = e.value !== 'false' && e.type !== 'remove';
button.disabled = disabled;
if (file != null) {
file.disabled = disabled;
}
}));
$e.on('block.mod.*.focused.*', (e: ModEvent) => this.waitStatus('ready', () => {
const
{button} = this.$refs;
if (e.value !== 'false' && e.type !== 'remove') {
button.focus();
} else {
button.blur();
}
}));
}
/**
* Handler: button trigger
*
* @param e
* @emits `click(e: Event)`
*/
protected async onClick(e: Event): Promise<void> {
switch (this.type) {
case 'link':
break;
case 'file':
this.$refs.file?.click();
break;
default: {
const
dp = this.dataProvider;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (dp != null && (dp !== 'Provider' || this.href != null)) {
let
that = this;
if (this.href != null) {
that = this.base(this.href);
}
await (<Function>that[this.method])(undefined);
// Form attribute fix for MS Edge && IE
} else if (this.form != null && this.type === 'submit') {
e.preventDefault();
const form = this.dom.getComponent<bForm>(`#${this.form}`);
form && await form.submit();
}
await this.toggle();
}
}
this.emit('click', e);
}
/**
* Handler: the value of the file input has changed
*
* @param e
* @emits `change(result: InputEvent)`
*/
protected | onFileChange | identifier_name |
|
b-button.ts | (iAccess, iOpenToggle)
class bButton extends iData implements iAccess, iOpenToggle, iVisible, iWidth, iSize {
override readonly rootTag: string = 'span';
override readonly dataProvider: string = 'Provider';
override readonly defaultRequestFilter: RequestFilter = true;
/** @see [[iVisible.prototype.hideIfOffline]] */
@prop(Boolean)
readonly hideIfOffline: boolean = false;
/**
* A button's type to create. Possible values:
*
* 1. `button` - simple button control;
* 2. `submit` - button to send the tied form;
* 3. `file` - button to open the file uploading dialog;
* 4. `link` - hyperlink to the specified URL (to provide URL, use the `href` prop).
*
* @example
* ```
* < b-button @click = console.log('boom!')
* Make boom!
*
* < b-button :type = 'file' | @onChange = console.log($event)
* Upload a file
*
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-form
* < b-input :name = 'name'
* < b-button :type = 'submit'
* Send
* ```
*/
@prop(String)
readonly type: ButtonType = 'button';
/**
* If the `type` prop is set to `file`, this prop defines which file types are selectable in a file upload control
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefaccept
* @example
* ```
* < b-button :type = 'file' | :accept = '.txt' | @onChange = console.log($event)
* Upload a file
* ```
*/
@prop({type: String, required: false})
readonly accept?: string;
/**
* If the `type` prop is set to `link`, this prop contains a value for `<a href>`.
* Otherwise, the prop includes a base URL for a data provider.
*
* @example
* ```
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-button :href = '/generate/user'
* Generate a new user
* ```
*/
@prop({type: String, required: false})
readonly href?: string;
/**
* A data provider method to use if `dataProvider` or `href` props are passed
*
* @example
* ```
* < b-button :href = '/generate/user' | :method = 'put'
* Generate a new user
*
* < b-button :dataProvider = 'Cities' | :method = 'peek'
* Fetch cities
* ```
*/
@prop(String)
readonly method: ModelMethod = 'get';
/**
* A string specifying the `<form>` element with which the component is associated (that is, its form owner).
* This string's value, if present, must match the id of a `<form>` element in the same document.
* If this attribute isn't specified, the component is associated with the nearest containing form, if any.
*
* The form prop lets you place a component anywhere in the document but have it included with a form elsewhere
* in the document.
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefform
*
* @example
* ```
* < b-input :name = 'fname' | :form = 'my-form'
*
* < b-button type = 'submit' | :form = 'my-form'
* Submit
*
* < form id = my-form
* ```
*/
@prop({type: String, required: false})
readonly form?: string;
/** @see [[iAccess.autofocus]] */
@prop({type: Boolean, required: false})
readonly autofocus?: boolean;
/** @see [[iAccess.tabIndex]] */
@prop({type: Number, required: false})
readonly tabIndex?: number;
/**
* Icon to show before the button text
*
* @example
* ```
* < b-button :preIcon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIcon?: string;
/**
* Name of the used component to show `preIcon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :preIconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIconComponent?: string;
/**
* Icon to show after the button text
*
* @example
* ```
* < b-button :icon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly icon?: string;
/**
* Name of the used component to show `icon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :iconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly iconComponent?: string;
/**
* A component to show the "in-progress" state, or
* a boolean if the progress should be shown via the slot or `b-progress-icon`
*
* @default `'b-progress-icon'`
* @example
* ```
* < b-button :progressIcon = 'b-my-progress-icon'
* Submit
* ```
*/
@prop({type: [String, Boolean], required: false})
readonly progressIcon?: string | boolean;
/**
* Tooltip text to show when hovering over the component
*
* @example
* ```
* < b-button :hint = 'Click on me!!!'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hint?: string;
/**
* Tooltip position to use when hovering over the component
*
* @see [[gHint]]
* @example
* ```
* < b-button :hint = 'Click on me!!!' | :hintPos = 'bottom-right'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hintPos?: HintPosition;
/**
* The way to show the dropdown if the `dropdown` slot is provided
* @see [[gHint]]
*
* @example
* ```
* < b-button :dropdown = 'bottom-right'
* < template #default
* Submit
*
* < template #dropdown
* Additional information...
* ```
*/
@prop(String)
readonly dropdown: string = 'bottom';
/**
* Initial additional attributes are provided to an "internal" (native) button tag
* @see [[bButton.$refs.button]]
*/
@prop({type: Object, required: false})
readonly attrsProp?: Dictionary;
/**
* Additional attributes are provided to an "internal" (native) button tag
*
* @see [[bButton.attrsProp]]
* @see [[bButton.$refs.button]]
*/
get attrs(): Dictionary {
const
attrs = {...this.attrsProp};
if (this.type === 'link') {
attrs.href = this.href;
} else |
if (this.hasDropdown) {
attrs['aria-controls'] = this.dom.getId('dropdown');
attrs['aria-expanded'] = this.mods.opened;
}
return attrs;
}
/** @see [[iAccess.isFocused]] */
@computed({dependencies: ['mods.focused']})
get isFocused(): boolean {
const
{button} = this.$refs;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (button != null) {
return document.activeElement === button;
}
return iAccess.isFocused(this);
}
/**
* List of selected files (works with the `file` type)
*/
get files(): CanUndef<FileList> {
return this.$refs.file?.files ?? undefined;
}
/**
* True if the component has a dropdown area
*/
get hasDropdown(): boolean {
return Boolean(
this.vdom.getSlot('dropdown') && (
this.isFunctional ||
this.opt.ifOnce('opened', this.m.opened !== 'false') > 0 && delete this.watchModsStore.opened
)
);
}
static override readonly mods: ModsDecl = {
...iAccess.mods,
...iVisible.mods,
...iWidth.mods,
...iSize.mods,
opened: [
...iOpenToggle.mods.opened ?? [],
['false']
],
| {
attrs.type = this.type;
attrs.form = this.form;
} | conditional_block |
b-button.ts | derive(iAccess, iOpenToggle)
class bButton extends iData implements iAccess, iOpenToggle, iVisible, iWidth, iSize {
override readonly rootTag: string = 'span';
override readonly dataProvider: string = 'Provider';
override readonly defaultRequestFilter: RequestFilter = true;
/** @see [[iVisible.prototype.hideIfOffline]] */
@prop(Boolean)
readonly hideIfOffline: boolean = false;
| /**
* A button's type to create. Possible values:
*
* 1. `button` - simple button control;
* 2. `submit` - button to send the tied form;
* 3. `file` - button to open the file uploading dialog;
* 4. `link` - hyperlink to the specified URL (to provide URL, use the `href` prop).
*
* @example
* ```
* < b-button @click = console.log('boom!')
* Make boom!
*
* < b-button :type = 'file' | @onChange = console.log($event)
* Upload a file
*
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-form
* < b-input :name = 'name'
* < b-button :type = 'submit'
* Send
* ```
*/
@prop(String)
readonly type: ButtonType = 'button';
/**
* If the `type` prop is set to `file`, this prop defines which file types are selectable in a file upload control
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefaccept
* @example
* ```
* < b-button :type = 'file' | :accept = '.txt' | @onChange = console.log($event)
* Upload a file
* ```
*/
@prop({type: String, required: false})
readonly accept?: string;
/**
* If the `type` prop is set to `link`, this prop contains a value for `<a href>`.
* Otherwise, the prop includes a base URL for a data provider.
*
* @example
* ```
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-button :href = '/generate/user'
* Generate a new user
* ```
*/
@prop({type: String, required: false})
readonly href?: string;
/**
* A data provider method to use if `dataProvider` or `href` props are passed
*
* @example
* ```
* < b-button :href = '/generate/user' | :method = 'put'
* Generate a new user
*
* < b-button :dataProvider = 'Cities' | :method = 'peek'
* Fetch cities
* ```
*/
@prop(String)
readonly method: ModelMethod = 'get';
/**
* A string specifying the `<form>` element with which the component is associated (that is, its form owner).
* This string's value, if present, must match the id of a `<form>` element in the same document.
* If this attribute isn't specified, the component is associated with the nearest containing form, if any.
*
* The form prop lets you place a component anywhere in the document but have it included with a form elsewhere
* in the document.
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefform
*
* @example
* ```
* < b-input :name = 'fname' | :form = 'my-form'
*
* < b-button type = 'submit' | :form = 'my-form'
* Submit
*
* < form id = my-form
* ```
*/
@prop({type: String, required: false})
readonly form?: string;
/** @see [[iAccess.autofocus]] */
@prop({type: Boolean, required: false})
readonly autofocus?: boolean;
/** @see [[iAccess.tabIndex]] */
@prop({type: Number, required: false})
readonly tabIndex?: number;
/**
* Icon to show before the button text
*
* @example
* ```
* < b-button :preIcon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIcon?: string;
/**
* Name of the used component to show `preIcon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :preIconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIconComponent?: string;
/**
* Icon to show after the button text
*
* @example
* ```
* < b-button :icon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly icon?: string;
/**
* Name of the used component to show `icon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :iconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly iconComponent?: string;
/**
* A component to show the "in-progress" state, or
* a boolean if the progress should be shown via the slot or `b-progress-icon`
*
* @default `'b-progress-icon'`
* @example
* ```
* < b-button :progressIcon = 'b-my-progress-icon'
* Submit
* ```
*/
@prop({type: [String, Boolean], required: false})
readonly progressIcon?: string | boolean;
/**
* Tooltip text to show when hovering over the component
*
* @example
* ```
* < b-button :hint = 'Click on me!!!'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hint?: string;
/**
* Tooltip position to use when hovering over the component
*
* @see [[gHint]]
* @example
* ```
* < b-button :hint = 'Click on me!!!' | :hintPos = 'bottom-right'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hintPos?: HintPosition;
/**
* The way to show the dropdown if the `dropdown` slot is provided
* @see [[gHint]]
*
* @example
* ```
* < b-button :dropdown = 'bottom-right'
* < template #default
* Submit
*
* < template #dropdown
* Additional information...
* ```
*/
@prop(String)
readonly dropdown: string = 'bottom';
/**
* Initial additional attributes are provided to an "internal" (native) button tag
* @see [[bButton.$refs.button]]
*/
@prop({type: Object, required: false})
readonly attrsProp?: Dictionary;
/**
* Additional attributes are provided to an "internal" (native) button tag
*
* @see [[bButton.attrsProp]]
* @see [[bButton.$refs.button]]
*/
get attrs(): Dictionary {
const
attrs = {...this.attrsProp};
if (this.type === 'link') {
attrs.href = this.href;
} else {
attrs.type = this.type;
attrs.form = this.form;
}
if (this.hasDropdown) {
attrs['aria-controls'] = this.dom.getId('dropdown');
attrs['aria-expanded'] = this.mods.opened;
}
return attrs;
}
/** @see [[iAccess.isFocused]] */
@computed({dependencies: ['mods.focused']})
get isFocused(): boolean {
const
{button} = this.$refs;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (button != null) {
return document.activeElement === button;
}
return iAccess.isFocused(this);
}
/**
* List of selected files (works with the `file` type)
*/
get files(): CanUndef<FileList> {
return this.$refs.file?.files ?? undefined;
}
/**
* True if the component has a dropdown area
*/
get hasDropdown(): boolean {
return Boolean(
this.vdom.getSlot('dropdown') && (
this.isFunctional ||
this.opt.ifOnce('opened', this.m.opened !== 'false') > 0 && delete this.watchModsStore.opened
)
);
}
static override readonly mods: ModsDecl = {
...iAccess.mods,
...iVisible.mods,
...iWidth.mods,
...iSize.mods,
opened: [
...iOpenToggle.mods.opened ?? [],
['false']
],
upper | random_line_split |
|
b-button.ts | (iAccess, iOpenToggle)
class bButton extends iData implements iAccess, iOpenToggle, iVisible, iWidth, iSize {
override readonly rootTag: string = 'span';
override readonly dataProvider: string = 'Provider';
override readonly defaultRequestFilter: RequestFilter = true;
/** @see [[iVisible.prototype.hideIfOffline]] */
@prop(Boolean)
readonly hideIfOffline: boolean = false;
/**
* A button's type to create. Possible values:
*
* 1. `button` - simple button control;
* 2. `submit` - button to send the tied form;
* 3. `file` - button to open the file uploading dialog;
* 4. `link` - hyperlink to the specified URL (to provide URL, use the `href` prop).
*
* @example
* ```
* < b-button @click = console.log('boom!')
* Make boom!
*
* < b-button :type = 'file' | @onChange = console.log($event)
* Upload a file
*
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-form
* < b-input :name = 'name'
* < b-button :type = 'submit'
* Send
* ```
*/
@prop(String)
readonly type: ButtonType = 'button';
/**
* If the `type` prop is set to `file`, this prop defines which file types are selectable in a file upload control
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefaccept
* @example
* ```
* < b-button :type = 'file' | :accept = '.txt' | @onChange = console.log($event)
* Upload a file
* ```
*/
@prop({type: String, required: false})
readonly accept?: string;
/**
* If the `type` prop is set to `link`, this prop contains a value for `<a href>`.
* Otherwise, the prop includes a base URL for a data provider.
*
* @example
* ```
* < b-button :type = 'link' | :href = 'https://google.com'
* Go to Google
*
* < b-button :href = '/generate/user'
* Generate a new user
* ```
*/
@prop({type: String, required: false})
readonly href?: string;
/**
* A data provider method to use if `dataProvider` or `href` props are passed
*
* @example
* ```
* < b-button :href = '/generate/user' | :method = 'put'
* Generate a new user
*
* < b-button :dataProvider = 'Cities' | :method = 'peek'
* Fetch cities
* ```
*/
@prop(String)
readonly method: ModelMethod = 'get';
/**
* A string specifying the `<form>` element with which the component is associated (that is, its form owner).
* This string's value, if present, must match the id of a `<form>` element in the same document.
* If this attribute isn't specified, the component is associated with the nearest containing form, if any.
*
* The form prop lets you place a component anywhere in the document but have it included with a form elsewhere
* in the document.
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#htmlattrdefform
*
* @example
* ```
* < b-input :name = 'fname' | :form = 'my-form'
*
* < b-button type = 'submit' | :form = 'my-form'
* Submit
*
* < form id = my-form
* ```
*/
@prop({type: String, required: false})
readonly form?: string;
/** @see [[iAccess.autofocus]] */
@prop({type: Boolean, required: false})
readonly autofocus?: boolean;
/** @see [[iAccess.tabIndex]] */
@prop({type: Number, required: false})
readonly tabIndex?: number;
/**
* Icon to show before the button text
*
* @example
* ```
* < b-button :preIcon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIcon?: string;
/**
* Name of the used component to show `preIcon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :preIconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly preIconComponent?: string;
/**
* Icon to show after the button text
*
* @example
* ```
* < b-button :icon = 'dropdown'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly icon?: string;
/**
* Name of the used component to show `icon`
*
* @default `'b-icon'`
* @example
* ```
* < b-button :iconComponent = 'b-my-icon'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly iconComponent?: string;
/**
* A component to show the "in-progress" state, or
* a boolean if the progress should be shown via the slot or `b-progress-icon`
*
* @default `'b-progress-icon'`
* @example
* ```
* < b-button :progressIcon = 'b-my-progress-icon'
* Submit
* ```
*/
@prop({type: [String, Boolean], required: false})
readonly progressIcon?: string | boolean;
/**
* Tooltip text to show when hovering over the component
*
* @example
* ```
* < b-button :hint = 'Click on me!!!'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hint?: string;
/**
* Tooltip position to use when hovering over the component
*
* @see [[gHint]]
* @example
* ```
* < b-button :hint = 'Click on me!!!' | :hintPos = 'bottom-right'
* Submit
* ```
*/
@prop({type: String, required: false})
readonly hintPos?: HintPosition;
/**
* The way to show the dropdown if the `dropdown` slot is provided
* @see [[gHint]]
*
* @example
* ```
* < b-button :dropdown = 'bottom-right'
* < template #default
* Submit
*
* < template #dropdown
* Additional information...
* ```
*/
@prop(String)
readonly dropdown: string = 'bottom';
/**
* Initial additional attributes are provided to an "internal" (native) button tag
* @see [[bButton.$refs.button]]
*/
@prop({type: Object, required: false})
readonly attrsProp?: Dictionary;
/**
* Additional attributes are provided to an "internal" (native) button tag
*
* @see [[bButton.attrsProp]]
* @see [[bButton.$refs.button]]
*/
get attrs(): Dictionary {
const
attrs = {...this.attrsProp};
if (this.type === 'link') {
attrs.href = this.href;
} else {
attrs.type = this.type;
attrs.form = this.form;
}
if (this.hasDropdown) {
attrs['aria-controls'] = this.dom.getId('dropdown');
attrs['aria-expanded'] = this.mods.opened;
}
return attrs;
}
/** @see [[iAccess.isFocused]] */
@computed({dependencies: ['mods.focused']})
get isFocused(): boolean {
const
{button} = this.$refs;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (button != null) {
return document.activeElement === button;
}
return iAccess.isFocused(this);
}
/**
* List of selected files (works with the `file` type)
*/
get files(): CanUndef<FileList> {
return this.$refs.file?.files ?? undefined;
}
/**
* True if the component has a dropdown area
*/
get hasDropdown(): boolean |
static override readonly mods: ModsDecl = {
...iAccess.mods,
...iVisible.mods,
...iWidth.mods,
...iSize.mods,
opened: [
...iOpenToggle.mods.opened ?? [],
['false']
],
| {
return Boolean(
this.vdom.getSlot('dropdown') && (
this.isFunctional ||
this.opt.ifOnce('opened', this.m.opened !== 'false') > 0 && delete this.watchModsStore.opened
)
);
} | identifier_body |
properties.ts | ;
bgn_channels: never[];
'2_4GHz_channels': number[];
'5GHz_DFS_channels': number[];
g_only_channels: never[];
region: number;
'5GHz_channels': number[];
'5GHz_wide_channels': number[];
'2_4GHz_n_only_channels': never[];
bg_channels: never[];
an_channels: never[];
b_only_channels: never[];
}
export type sySI = Temperature[];
export interface Temperature {
name: string;
value: number | bigint;
thousandths: number;
}
export interface CLTM {
verbose: number;
EstECapTemp: number;
Imax: number;
CLTMAsleep: number;
Last_w: number;
TargetTemp: number;
Imin: number;
override5: bigint;
override24: bigint;
Kp: number;
disabled: number;
Ki: number;
BadSensors: 0;
Ts: number;
Last_uVal: number;
}
export interface sPLL {
AudioPLLRetries: number;
CPUPLLRetries: number;
BasebandPLLRetries: number;
PCIePLLRetries: bigint;
DDRPLLRetries: number;
}
export interface sySt {
problems: ProblemCode[];
}
export interface syIg {
problems: ProblemCode[];
}
export interface ProblemCode {
code: StatusCodes;
}
export enum StatusCode {
SETUP_OVER_WAN = 'waCF',
USB_POWER_INSUFFICIENT = 'usbf',
ETHERNET_UNPLUGGED = 'waNL',
UPDATE_AVAILABLE = 'upAv',
UNRECOGNIZED_APPLE_ID_PASSWORD = 'iCPW',
DOUBLE_NAT = 'DubN',
IPV6_RELAY_ERROR = '6rdn',
UNSECURED_WIRELESS_NETWORK = 'opNW',
PPPOE_SERVER = 'ppNS',
}
type StatusCodes = 'waCF' | 'usbf' | 'waNL' | 'upAv' | 'iCPW' | 'DubN' | '6rdn' | 'opNW' | 'ppNS';
export interface timz {
zoneVersion: string; // '2018g'
zoneName: string;
zoneFile: Buffer;
}
export interface usrd {
users: FileSharingUser[];
}
export interface FileSharingUser {
password: string;
fileSharingAccess: number;
name: string;
}
export enum FileSharingAccess {
READ_WRITE = 0,
READ_ONLY = 1,
NOT_ALLOWED = 2,
}
export interface stat {
anonUUID: string;
entries: DebugDataEntry[];
version: number;
}
export interface DebugDataEntry {
data: Buffer;
title: string;
dictID: number;
}
export interface dSpn {
DiskHasSpunDown: number;
}
export interface raSL {
[ifname: string]: WiFiClient[];
}
export interface WiFiClient {
txrate_local: number;
nf_chain: bigint[];
txrate: number;
rssi_local: bigint;
rates: string;
rxPkt: number;
phy_mode: string; // '802.11a/n'
ampdu: string; // 'on'
rxBytes: number;
rssi_chain: bigint[];
inact: number;
rssi: bigint;
noise: bigint;
txPkt: number;
timeAssoc: number;
opmode: string; // 'sta'
txpower: number;
mcsindex_local: number;
macAddress: MACAddress;
'11n_mode': string;
htcaps: number;
txBytes: bigint;
}
export interface raSR {
scan_results: ScanResults[];
}
export interface ScanResults {
[ifname: string]: ScanResultsData[];
}
export interface ScanResultsData {
IE: Buffer;
BEACON_INT: number;
HT_INFO_IE?: {
IE_KEY_HT_INFO_EXT_CHANNEL: number | bigint;
};
HT_CAPS_IE?: {
IE_KEY_HT_CAPS_HT40: boolean;
};
RSN_IE?: {
IE_KEY_RSN_UCIPHERS: number[];
IE_KEY_RSN_MCIPHER: number;
IE_KEY_RSN_AUTHSELS: number[];
IE_KEY_RSN_VERSION: number;
};
RATES: number[];
SSID_STR: string;
CAPABILITIES: number;
RSSI: bigint;
BSSID: string;
SSID: Buffer;
DWDS_IE?: {
IE_KEY_DWDS_ROLE: number;
IE_KEY_DWDS_VERSION: number;
};
WPA_IE?: {
IE_KEY_WPA_AUTHSELS: number[];
IE_KEY_WPA_UCIPHERS: number[];
IE_KEY_WPA_VERSION: number; // 1
IE_KEY_WPA_MCIPHER: number;
};
'80211D_IE'?: {
IE_KEY_80211D_LOCATION: string;
IE_KEY_80211D_CHAN_INFO_ARRAY: {
IE_KEY_80211D_NUM_CHANNELS: number;
IE_KEY_80211D_MAX_POWER: number;
IE_KEY_80211D_FIRST_CHANNEL: number;
}[];
IE_KEY_80211D_COUNTRY_CODE: string;
};
APPLE_IE?: {
APPLE_IE_VERSION: number; // 1
APPLE_IE_WPS_CAP: boolean;
APPLE_IE_PRODUCT_ID: number;
};
CHANNEL: number;
NOISE: number;
}
export interface WiFi {
radios: WiFiRadioConfiguration[];
guestnet_intrabss?: boolean;
}
export interface WiFiRadioConfiguration {
legacywds: never[];
raSk: boolean;
country: string;
acEn: boolean;
raWE: Buffer;
/**
* Transmit Power
*
* 10 - 10%, 25 - 25%, 50 - 50%, 100 - 100%
*/
raPo: number;
/** Use wide channels */
raWC: boolean;
iso_cc: string;
raU2: number;
/** Radio mode */
raMd: RadioMode;
/** Wireless network mode */
raSt: WirelessNetworkMode;
/** Wi-Fi Network Name (SSID) */
raNm: string;
/** RADIUS server #1 IPv4 address */
raI1: IPv4Address;
/** RADIUS server #2 IPv4 address */
raI2: IPv4Address;
raF2: number;
raCA: boolean;
phymodes: number;
raT2: number;
dwFl: number;
raFl: number;
/** Password */
raCr: Buffer;
/** WPA Enterprise ?? */
raEA: boolean;
rTSN: boolean;
/** Allow this network to be extended */
dWDS: boolean;
raRe: number;
vaps: AdditionalWiFiNetwork[];
raEV: number;
raCi: boolean;
/** Channel ?? */
raCh: number;
raRo: boolean;
/** Hidden/closed network */
raCl: boolean;
/** RADIUS server #2 secret */
raS2: string;
sku: string;
raDt: number;
raR2: number;
/** WPA Group Key Timeout */
raKT: number;
ra1C: boolean;
/** MAC address */
raMA: Buffer;
/** RADIUS server #1 port ?? */
raAu: number;
raTm: number;
wdFl: number;
raDe: number;
raWM: number;
/**
* Multicast rate
*
* 1 - 1 Mbps, 2 - 2 Mbps, 85 - 5.5 Mbps, 6 - 6 Mbps, 9 - 9 Mbps, 17 - 11 Mbps, 18 - 12 Mbps, 24 - 18 Mbps,
* 36 - 24 Mbps)
*/
raMu: number;
/** RADIUS server #1 secret */
raSe: string;
}
export enum RadioMode {
'802.11b only' = 1,
'802.11b/g compatible' = 2,
'802.11g only' = 3,
'802.11a only' = 4,
'802.11n (802.11a compatible)' = 5,
'802.11n (802.11b/g compatible)' = 6,
'802.11n only (2.4 GHz)' = 7,
'802.11n only (5 GHz)' = 8,
}
export enum WirelessNetworkMode {
CREATE_NETWORK = 0,
JOIN_NETWORK = 1,
DISABLED = 3,
}
export interface AdditionalWiFiNetwork {
raSk: boolean;
raWE: Buffer;
raNm: string;
Mode: number; // Was set to 6 - the same as raMu ??
Enabled: boolean;
/** Password */
raCr: Buffer;
/** WPA Enterprise */ | raEA: boolean;
rTSN: boolean; | random_line_split |
|
main.rs | r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
routes.push(route);
}
r -= dr;
}
routes
}
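// Traces one closed heart outline centred at (ox, oy): samples the parametric
// heart curve, rotates each point by `ang`, scales by `r` and closes the path.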
fn heart(ox: f64, oy: f64, r: f64, ang: f64) -> Vec<(f64, f64)> {
let mut route = Vec::new();
let count = (2.0 * PI * r / 0.5).floor() as usize;
for i in 0..count {
let a = i as f64 * 2.0 * PI / (count as f64);
let (x, y) = heart_function(a);
let (x, y) = p_r((x, y), ang);
let p = (x * r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
route
}
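// Draws concentric hearts from `extra_radius` down to `stopr` in steps of `dr`;
// each ring's rotation offset is driven by Perlin noise and its drawn radius is
// clamped to `radius`.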
fn heart_nested_rotating<R: Rng>(
rng: &mut R,
ox: f64,
oy: f64,
radius: f64,
extra_radius: f64,
dr: f64,
stopr: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut r = extra_radius;
let perlin = Perlin::new();
let seed = rng.gen_range(-555., 555.);
let f = rng.gen_range(0.05, 0.1) * rng.gen_range(0.2, 1.0);
let amp = rng.gen_range(0.03, 0.08) / f;
let basen = perlin.get([seed, f * r]);
while r > stopr {
let actualr = r.min(radius);
let count = (2.0 * PI * r / 0.5).floor() as usize;
if count > 3 {
let n = perlin.get([seed, f * r]) - basen;
let offr = n * amp;
let route = heart(ox, oy, actualr, offr);
routes.push(route);
}
r -= dr;
}
routes
}
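// Renders a single grid cell: a heart spiral or nested rotating hearts, trimmed
// with a passage counter to limit pen overlap, then small packed hearts fill the
// remaining unpainted area.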
fn cell(
seed: f64,
origin: (f64, f64),
width: f64,
height: f64,
pad: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut rng = rng_from_seed(seed);
let dr = rng.gen_range(0.6, 1.0);
let r = (width.min(height) / 2.0 - pad) * rng.gen_range(0.8, 1.0);
let r2 = r
* (1.0
+ rng.gen_range(0.0, 1.0)
* rng.gen_range(0.0, 1.0)
* rng.gen_range(-1.0f64, 1.0).max(0.0));
/*if rng.gen_bool(0.1) {
routes.extend(heart_nested(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else */
if rng.gen_bool(0.1) {
routes.push(heart_spiral(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else {
let stopr = if rng.gen_bool(0.5) {
rng.gen_range(0.1, 0.7) * r
} else {
0.1
};
routes.extend(heart_nested_rotating(
&mut rng,
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
r2,
dr,
stopr,
));
}
let mut mask = PaintMask::new(0.2, width, height);
let ppad = rng.gen_range(4.0, 6.0);
// TODO use an inner heart step as mask to make a white?
// to protect the paper from having too much passage, we will cut some lines based on a grid lookup.
let prec = 0.5;
let passage_limit = 10;
let minlen = 3;
let mut passage = Passage2DCounter::new(prec, width, height);
let mut paths = vec![];
for r in routes {
let mut localpassage = Passage2DCounter::new(prec, width, height);
let mut path: Vec<(f64, f64)> = vec![];
for p in r {
let localp = (p.0 - origin.0, p.1 - origin.1);
if passage.get(localp) > passage_limit {
if path.len() >= minlen {
paths.push(path);
}
path = vec![];
} else {
path.push(p);
}
localpassage.count(localp);
mask.paint_circle(&VCircle::new(p.0 - origin.0, p.1 - origin.1, ppad));
}
if path.len() >= minlen {
paths.push(path);
}
passage.count_once_from(&localpassage);
}
routes = paths;
let bounds = (pad, pad, width - pad, height - pad);
let in_shape = |p: (f64, f64)| -> bool {
!mask.is_painted(p) && strictly_in_boundaries(p, bounds)
};
let does_overlap = |c: &VCircle| {
in_shape((c.x, c.y))
&& circle_route((c.x, c.y), c.r, 8)
.iter()
.all(|&p| in_shape(p))
};
let ppad = rng.gen_range(0.4, 0.8);
let min = rng.gen_range(1.5, 2.0);
let max = min + rng.gen_range(0.0, 5.0);
let optim = rng.gen_range(1, 10);
let count = 2000;
let circles = packing(
&mut rng,
vec![],
5000000,
count,
optim,
ppad,
bounds,
&does_overlap,
min,
max,
);
let aligned = rng.gen_bool(0.3);
for c in circles {
let x = c.x + origin.0;
let y = c.y + origin.1;
let r = c.r;
let ang = if aligned {
0.
} else {
PI + (c.x - width / 2.0).atan2(c.y - height / 2.0)
};
routes.push(heart(x, y, r, ang));
}
routes
}
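// Lays the page out as a grid of half-width by half-height cells, renders every
// cell in parallel and collects all routes into one black stroke layer.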
fn art(opts: &Opts) -> Vec<Group> {
let width = opts.width;
let height = opts.height;
let cw = width / 2.;
let ch = height / 2.;
let pad = 5.;
let cols = (width / cw).floor() as usize;
let rows = (height / ch).floor() as usize;
let offsetx = 0.0;
let offsety = 0.0;
let routes = (0..rows)
.into_par_iter()
.flat_map(|j| {
(0..cols).into_par_iter().flat_map(move |i| {
cell(
opts.seed / 7.7 + (i + j * cols) as f64 / 0.3,
(offsetx + i as f64 * cw, offsety + j as f64 * ch),
cw,
ch,
pad,
)
})
})
.collect::<Vec<Vec<(f64, f64)>>>();
vec![(routes, "black")]
.iter()
.enumerate()
.map(|(i, (routes, color))| {
let mut data = Data::new();
for route in routes.clone() {
data = render_route_curve(data, route);
}
let mut l = layer(format!("{} {}", i, String::from(*color)).as_str());
l = l.add(base_path(color, 0.35, data));
l
})
.collect()
}
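// Entry point: parses the CLI options, renders the art groups into an SVG
// document and writes it to the requested file.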
fn | () {
let opts: Opts = Opts::parse();
let groups = art(&opts);
let mut document = base_document("white", opts.width, opts.height);
for g in groups {
document = document.add(g);
}
svg::save(opts.file, &document).unwrap();
}
struct PaintMask {
mask: Vec<bool>,
precision: f64,
width: f64,
height: f64,
}
impl PaintMask {
fn new(precision: f64, width: f64, height: f64) -> Self {
let wi = (width / precision) as usize;
| main | identifier_name |
main.rs | r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
routes.push(route);
}
r -= dr;
}
routes
}
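// Samples the parametric heart curve at a resolution proportional to its radius,
// rotates it by `ang`, scales it by `r`, translates it to (ox, oy) and closes the loop.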
fn heart(ox: f64, oy: f64, r: f64, ang: f64) -> Vec<(f64, f64)> {
let mut route = Vec::new();
let count = (2.0 * PI * r / 0.5).floor() as usize;
for i in 0..count {
let a = i as f64 * 2.0 * PI / (count as f64);
let (x, y) = heart_function(a);
let (x, y) = p_r((x, y), ang);
let p = (x * r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
route
}
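// Stacks concentric hearts from `extra_radius` down to `stopr`; the drawn radius is
// capped at `radius`, and each ring is rotated by a Perlin-noise offset so the
// nesting reads as a slow twist of the outline.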
fn heart_nested_rotating<R: Rng>(
rng: &mut R,
ox: f64,
oy: f64,
radius: f64,
extra_radius: f64,
dr: f64,
stopr: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut r = extra_radius;
let perlin = Perlin::new();
let seed = rng.gen_range(-555., 555.);
let f = rng.gen_range(0.05, 0.1) * rng.gen_range(0.2, 1.0);
let amp = rng.gen_range(0.03, 0.08) / f;
let basen = perlin.get([seed, f * r]);
while r > stopr {
let actualr = r.min(radius);
let count = (2.0 * PI * r / 0.5).floor() as usize;
if count > 3 {
let n = perlin.get([seed, f * r]) - basen;
let offr = n * amp;
let route = heart(ox, oy, actualr, offr);
routes.push(route);
}
r -= dr;
}
routes
}
fn cell(
seed: f64,
origin: (f64, f64),
width: f64,
height: f64,
pad: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut rng = rng_from_seed(seed);
let dr = rng.gen_range(0.6, 1.0);
let r = (width.min(height) / 2.0 - pad) * rng.gen_range(0.8, 1.0);
let r2 = r
* (1.0
+ rng.gen_range(0.0, 1.0)
* rng.gen_range(0.0, 1.0)
* rng.gen_range(-1.0f64, 1.0).max(0.0));
/*if rng.gen_bool(0.1) {
routes.extend(heart_nested(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else */
if rng.gen_bool(0.1) {
routes.push(heart_spiral(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else {
let stopr = if rng.gen_bool(0.5) {
rng.gen_range(0.1, 0.7) * r
} else {
0.1
};
routes.extend(heart_nested_rotating(
&mut rng,
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
r2,
dr,
stopr,
));
}
let mut mask = PaintMask::new(0.2, width, height);
let ppad = rng.gen_range(4.0, 6.0);
// TODO: use an inner heart step as a mask to leave a white area?
// To protect the paper from too much pen passage, we cut some lines based on a grid lookup.
let prec = 0.5;
let passage_limit = 10;
let minlen = 3;
let mut passage = Passage2DCounter::new(prec, width, height);
let mut paths = vec![];
for r in routes {
let mut localpassage = Passage2DCounter::new(prec, width, height);
let mut path: Vec<(f64, f64)> = vec![];
for p in r {
let localp = (p.0 - origin.0, p.1 - origin.1);
if passage.get(localp) > passage_limit {
if path.len() >= minlen {
paths.push(path);
}
path = vec![];
} else {
path.push(p);
}
localpassage.count(localp);
mask.paint_circle(&VCircle::new(p.0 - origin.0, p.1 - origin.1, ppad));
}
if path.len() >= minlen |
passage.count_once_from(&localpassage);
}
routes = paths;
let bounds = (pad, pad, width - pad, height - pad);
let in_shape = |p: (f64, f64)| -> bool {
!mask.is_painted(p) && strictly_in_boundaries(p, bounds)
};
let does_overlap = |c: &VCircle| {
in_shape((c.x, c.y))
&& circle_route((c.x, c.y), c.r, 8)
.iter()
.all(|&p| in_shape(p))
};
let ppad = rng.gen_range(0.4, 0.8);
let min = rng.gen_range(1.5, 2.0);
let max = min + rng.gen_range(0.0, 5.0);
let optim = rng.gen_range(1, 10);
let count = 2000;
let circles = packing(
&mut rng,
vec![],
5000000,
count,
optim,
ppad,
bounds,
&does_overlap,
min,
max,
);
let aligned = rng.gen_bool(0.3);
for c in circles {
let x = c.x + origin.0;
let y = c.y + origin.1;
let r = c.r;
let ang = if aligned {
0.
} else {
PI + (c.x - width / 2.0).atan2(c.y - height / 2.0)
};
routes.push(heart(x, y, r, ang));
}
routes
}
fn art(opts: &Opts) -> Vec<Group> {
let width = opts.width;
let height = opts.height;
let cw = width / 2.;
let ch = height / 2.;
let pad = 5.;
let cols = (width / cw).floor() as usize;
let rows = (height / ch).floor() as usize;
let offsetx = 0.0;
let offsety = 0.0;
let routes = (0..rows)
.into_par_iter()
.flat_map(|j| {
(0..cols).into_par_iter().flat_map(move |i| {
cell(
opts.seed / 7.7 + (i + j * cols) as f64 / 0.3,
(offsetx + i as f64 * cw, offsety + j as f64 * ch),
cw,
ch,
pad,
)
})
})
.collect::<Vec<Vec<(f64, f64)>>>();
vec![(routes, "black")]
.iter()
.enumerate()
.map(|(i, (routes, color))| {
let mut data = Data::new();
for route in routes.clone() {
data = render_route_curve(data, route);
}
let mut l = layer(format!("{} {}", i, String::from(*color)).as_str());
l = l.add(base_path(color, 0.35, data));
l
})
.collect()
}
fn main() {
let opts: Opts = Opts::parse();
let groups = art(&opts);
let mut document = base_document("white", opts.width, opts.height);
for g in groups {
document = document.add(g);
}
svg::save(opts.file, &document).unwrap();
}
struct PaintMask {
mask: Vec<bool>,
precision: f64,
width: f64,
height: f64,
}
impl PaintMask {
fn new(precision: f64, width: f64, height: f64) -> Self {
let wi = (width / precision) as usize;
| {
paths.push(path);
} | conditional_block |
main.rs | = |c: &VCircle| {
in_shape((c.x, c.y))
&& circle_route((c.x, c.y), c.r, 8)
.iter()
.all(|&p| in_shape(p))
};
let ppad = rng.gen_range(0.4, 0.8);
let min = rng.gen_range(1.5, 2.0);
let max = min + rng.gen_range(0.0, 5.0);
let optim = rng.gen_range(1, 10);
let count = 2000;
let circles = packing(
&mut rng,
vec![],
5000000,
count,
optim,
ppad,
bounds,
&does_overlap,
min,
max,
);
let aligned = rng.gen_bool(0.3);
for c in circles {
let x = c.x + origin.0;
let y = c.y + origin.1;
let r = c.r;
let ang = if aligned {
0.
} else {
PI + (c.x - width / 2.0).atan2(c.y - height / 2.0)
};
routes.push(heart(x, y, r, ang));
}
routes
}
fn art(opts: &Opts) -> Vec<Group> {
let width = opts.width;
let height = opts.height;
let cw = width / 2.;
let ch = height / 2.;
let pad = 5.;
let cols = (width / cw).floor() as usize;
let rows = (height / ch).floor() as usize;
let offsetx = 0.0;
let offsety = 0.0;
let routes = (0..rows)
.into_par_iter()
.flat_map(|j| {
(0..cols).into_par_iter().flat_map(move |i| {
cell(
opts.seed / 7.7 + (i + j * cols) as f64 / 0.3,
(offsetx + i as f64 * cw, offsety + j as f64 * ch),
cw,
ch,
pad,
)
})
})
.collect::<Vec<Vec<(f64, f64)>>>();
vec![(routes, "black")]
.iter()
.enumerate()
.map(|(i, (routes, color))| {
let mut data = Data::new();
for route in routes.clone() {
data = render_route_curve(data, route);
}
let mut l = layer(format!("{} {}", i, String::from(*color)).as_str());
l = l.add(base_path(color, 0.35, data));
l
})
.collect()
}
fn main() {
let opts: Opts = Opts::parse();
let groups = art(&opts);
let mut document = base_document("white", opts.width, opts.height);
for g in groups {
document = document.add(g);
}
svg::save(opts.file, &document).unwrap();
}
struct PaintMask {
mask: Vec<bool>,
precision: f64,
width: f64,
height: f64,
}
impl PaintMask {
fn new(precision: f64, width: f64, height: f64) -> Self {
let wi = (width / precision) as usize;
let hi = (height / precision) as usize;
Self {
mask: vec![false; wi * hi],
width,
height,
precision,
}
}
fn is_painted(&self, point: (f64, f64)) -> bool {
// check out of bounds
if point.0 <= 0.0
|| point.0 >= self.width
|| point.1 <= 0.0
|| point.1 >= self.height
{
return false;
}
let precision = self.precision;
let width = self.width;
let x = (point.0 / precision) as usize;
let y = (point.1 / precision) as usize;
let wi = (width / precision) as usize;
self.mask[x + y * wi]
}
fn paint_circle(&mut self, circle: &VCircle) {
let (minx, miny, maxx, maxy) = (
circle.x - circle.r,
circle.y - circle.r,
circle.x + circle.r,
circle.y + circle.r,
);
let precision = self.precision;
let width = self.width;
let minx = (minx / precision) as usize;
let miny = (miny / precision) as usize;
let maxx = (maxx / precision) as usize;
let maxy = (maxy / precision) as usize;
let wi = (width / precision) as usize;
let hi = (self.height / precision) as usize;
for x in minx..maxx {
if x >= wi {
continue;
}
for y in miny..maxy {
if y >= hi {
continue;
}
let point = (x as f64 * precision, y as f64 * precision);
if euclidian_dist(point, (circle.x, circle.y)) < circle.r {
self.mask[x + y * wi] = true;
}
}
}
}
}
#[derive(Clone, Copy, Debug)]
struct VCircle {
x: f64,
y: f64,
r: f64,
}
impl VCircle {
fn new(x: f64, y: f64, r: f64) -> Self {
VCircle { x, y, r }
}
fn dist(self: &Self, c: &VCircle) -> f64 {
euclidian_dist((self.x, self.y), (c.x, c.y)) - c.r - self.r
}
fn collides(self: &Self, c: &VCircle) -> bool {
self.dist(c) <= 0.0
}
}
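// Binary search for the largest scale in [min_scale, max_scale] that still satisfies
// `f` (to a 0.1 tolerance); returns None when even `min_scale` fails.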
fn scaling_search<F: FnMut(f64) -> bool>(
mut f: F,
min_scale: f64,
max_scale: f64,
) -> Option<f64> {
let mut from = min_scale;
let mut to = max_scale;
loop {
if !f(from) {
return None;
}
if to - from < 0.1 {
return Some(from);
}
let middle = (to + from) / 2.0;
if !f(middle) {
to = middle;
} else {
from = middle;
}
}
}
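// Largest radius a circle centered at (x, y) can take while staying inside the
// allowed shape and not colliding with any already placed circle.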
fn search_circle_radius(
does_overlap: &dyn Fn(&VCircle) -> bool,
circles: &Vec<VCircle>,
x: f64,
y: f64,
min_scale: f64,
max_scale: f64,
) -> Option<f64> {
let overlaps = |size| {
let c = VCircle::new(x, y, size);
does_overlap(&c) && !circles.iter().any(|other| c.collides(other))
};
scaling_search(overlaps, min_scale, max_scale)
}
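// Dart-throwing circle packing: sample random centers inside `bound`, grow each
// candidate to its maximal radius, keep the largest of every batch of `optimize_size`
// tries (shrunk by `pad`), and stop after `desired_count` circles or `iterations` samples.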
fn packing<R: Rng>(
rng: &mut R,
initial_circles: Vec<VCircle>,
iterations: usize,
desired_count: usize,
optimize_size: usize,
pad: f64,
bound: (f64, f64, f64, f64),
does_overlap: &dyn Fn(&VCircle) -> bool,
min_scale: f64,
max_scale: f64,
) -> Vec<VCircle> {
let mut circles = initial_circles.clone();
let mut tries = Vec::new();
for _i in 0..iterations {
let x: f64 = rng.gen_range(bound.0, bound.2);
let y: f64 = rng.gen_range(bound.1, bound.3);
if let Some(size) =
search_circle_radius(&does_overlap, &circles, x, y, min_scale, max_scale)
{
let circle = VCircle::new(x, y, size - pad);
tries.push(circle);
if tries.len() > optimize_size {
tries.sort_by(|a, b| b.r.partial_cmp(&a.r).unwrap());
let c = tries[0];
circles.push(c.clone());
tries = Vec::new();
}
}
if circles.len() > desired_count {
break;
}
}
circles
}
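// Counts how many polyline points fall into each cell of a coarse grid; used to stop
// drawing where too many strokes would overlap and damage the paper.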
pub struct Passage2DCounter {
granularity: f64,
width: f64,
height: f64,
counters: Vec<usize>,
}
impl Passage2DCounter {
pub fn new(granularity: f64, width: f64, height: f64) -> Self | {
let wi = (width / granularity).ceil() as usize;
let hi = (height / granularity).ceil() as usize;
let counters = vec![0; wi * hi];
Passage2DCounter {
granularity,
width,
height,
counters,
}
} | identifier_body |
|
main.rs | * r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
routes.push(route);
}
r -= dr;
}
routes
}
fn heart(ox: f64, oy: f64, r: f64, ang: f64) -> Vec<(f64, f64)> {
let mut route = Vec::new();
let count = (2.0 * PI * r / 0.5).floor() as usize;
for i in 0..count {
let a = i as f64 * 2.0 * PI / (count as f64);
let (x, y) = heart_function(a);
let (x, y) = p_r((x, y), ang);
let p = (x * r + ox, y * r + oy);
route.push(p);
}
route.push(route[0]);
route
}
fn heart_nested_rotating<R: Rng>(
rng: &mut R,
ox: f64,
oy: f64,
radius: f64,
extra_radius: f64,
dr: f64,
stopr: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut r = extra_radius;
let perlin = Perlin::new();
let seed = rng.gen_range(-555., 555.);
let f = rng.gen_range(0.05, 0.1) * rng.gen_range(0.2, 1.0);
let amp = rng.gen_range(0.03, 0.08) / f;
let basen = perlin.get([seed, f * r]);
while r > stopr {
let actualr = r.min(radius);
let count = (2.0 * PI * r / 0.5).floor() as usize;
if count > 3 {
let n = perlin.get([seed, f * r]) - basen;
let offr = n * amp;
let route = heart(ox, oy, actualr, offr);
routes.push(route);
}
r -= dr;
}
routes
}
fn cell(
seed: f64,
origin: (f64, f64),
width: f64,
height: f64,
pad: f64,
) -> Vec<Vec<(f64, f64)>> {
let mut routes = Vec::new();
let mut rng = rng_from_seed(seed);
let dr = rng.gen_range(0.6, 1.0);
let r = (width.min(height) / 2.0 - pad) * rng.gen_range(0.8, 1.0);
let r2 = r
* (1.0
+ rng.gen_range(0.0, 1.0)
* rng.gen_range(0.0, 1.0)
* rng.gen_range(-1.0f64, 1.0).max(0.0));
/*if rng.gen_bool(0.1) {
routes.extend(heart_nested(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else */
if rng.gen_bool(0.1) {
routes.push(heart_spiral(
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
dr,
));
} else {
let stopr = if rng.gen_bool(0.5) {
rng.gen_range(0.1, 0.7) * r
} else {
0.1
};
routes.extend(heart_nested_rotating(
&mut rng,
origin.0 + width / 2.0,
origin.1 + height / 2.0,
r,
r2,
dr,
stopr,
));
}
let mut mask = PaintMask::new(0.2, width, height);
let ppad = rng.gen_range(4.0, 6.0);
// TODO: use an inner heart step as a mask to leave a white area?
// To protect the paper from too much pen passage, we cut some lines based on a grid lookup.
let prec = 0.5;
let passage_limit = 10;
let minlen = 3;
let mut passage = Passage2DCounter::new(prec, width, height);
let mut paths = vec![];
for r in routes {
let mut localpassage = Passage2DCounter::new(prec, width, height);
let mut path: Vec<(f64, f64)> = vec![];
for p in r {
let localp = (p.0 - origin.0, p.1 - origin.1);
if passage.get(localp) > passage_limit {
if path.len() >= minlen {
paths.push(path);
}
path = vec![];
} else {
path.push(p);
}
localpassage.count(localp);
mask.paint_circle(&VCircle::new(p.0 - origin.0, p.1 - origin.1, ppad));
}
if path.len() >= minlen {
paths.push(path);
}
passage.count_once_from(&localpassage);
}
routes = paths;
let bounds = (pad, pad, width - pad, height - pad);
let in_shape = |p: (f64, f64)| -> bool {
!mask.is_painted(p) && strictly_in_boundaries(p, bounds)
};
let does_overlap = |c: &VCircle| {
in_shape((c.x, c.y))
&& circle_route((c.x, c.y), c.r, 8)
.iter()
.all(|&p| in_shape(p))
};
let ppad = rng.gen_range(0.4, 0.8);
let min = rng.gen_range(1.5, 2.0);
let max = min + rng.gen_range(0.0, 5.0);
let optim = rng.gen_range(1, 10);
let count = 2000;
let circles = packing(
&mut rng,
vec![],
5000000,
count,
optim,
ppad,
bounds,
&does_overlap,
min,
max,
);
let aligned = rng.gen_bool(0.3);
for c in circles {
let x = c.x + origin.0;
let y = c.y + origin.1;
let r = c.r;
let ang = if aligned {
0.
} else { | routes.push(heart(x, y, r, ang));
}
routes
}
fn art(opts: &Opts) -> Vec<Group> {
let width = opts.width;
let height = opts.height;
let cw = width / 2.;
let ch = height / 2.;
let pad = 5.;
let cols = (width / cw).floor() as usize;
let rows = (height / ch).floor() as usize;
let offsetx = 0.0;
let offsety = 0.0;
let routes = (0..rows)
.into_par_iter()
.flat_map(|j| {
(0..cols).into_par_iter().flat_map(move |i| {
cell(
opts.seed / 7.7 + (i + j * cols) as f64 / 0.3,
(offsetx + i as f64 * cw, offsety + j as f64 * ch),
cw,
ch,
pad,
)
})
})
.collect::<Vec<Vec<(f64, f64)>>>();
vec![(routes, "black")]
.iter()
.enumerate()
.map(|(i, (routes, color))| {
let mut data = Data::new();
for route in routes.clone() {
data = render_route_curve(data, route);
}
let mut l = layer(format!("{} {}", i, String::from(*color)).as_str());
l = l.add(base_path(color, 0.35, data));
l
})
.collect()
}
fn main() {
let opts: Opts = Opts::parse();
let groups = art(&opts);
let mut document = base_document("white", opts.width, opts.height);
for g in groups {
document = document.add(g);
}
svg::save(opts.file, &document).unwrap();
}
struct PaintMask {
mask: Vec<bool>,
precision: f64,
width: f64,
height: f64,
}
impl PaintMask {
fn new(precision: f64, width: f64, height: f64) -> Self {
let wi = (width / precision) as usize;
| PI + (c.x - width / 2.0).atan2(c.y - height / 2.0)
}; | random_line_split |
config.go | :"DRONE_SERVER_HOST" default:"localhost:8080"`
Port string `envconfig:"DRONE_SERVER_PORT" default:":8080"`
Proto string `envconfig:"DRONE_SERVER_PROTO" default:"http"`
Pprof bool `envconfig:"DRONE_PPROF_ENABLED"`
Acme bool `envconfig:"DRONE_TLS_AUTOCERT"`
Email string `envconfig:"DRONE_TLS_EMAIL"`
Cert string `envconfig:"DRONE_TLS_CERT"`
Key string `envconfig:"DRONE_TLS_KEY"`
}
// Proxy provides proxy server configuration.
Proxy struct {
Addr string `envconfig:"-"`
Host string `envconfig:"DRONE_SERVER_PROXY_HOST"`
Proto string `envconfig:"DRONE_SERVER_PROXY_PROTO"`
}
// Registration configuration.
Registration struct {
Closed bool `envconfig:"DRONE_REGISTRATION_CLOSED"`
}
// Authentication Controller configuration
Authentication struct {
Endpoint string `envconfig:"DRONE_ADMISSION_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_ADMISSION_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_ADMISSION_PLUGIN_SKIP_VERIFY"`
}
// Session provides the session configuration.
Session struct {
Timeout time.Duration `envconfig:"DRONE_COOKIE_TIMEOUT" default:"720h"`
Secret string `envconfig:"DRONE_COOKIE_SECRET"`
Secure bool `envconfig:"DRONE_COOKIE_SECURE"`
MappingFile string `envconfig:"DRONE_LEGACY_TOKEN_MAPPING_FILE"`
}
// Status provides status configurations.
Status struct {
Disabled bool `envconfig:"DRONE_STATUS_DISABLED"`
Name string `envconfig:"DRONE_STATUS_NAME"`
}
// Users provides the user configuration.
Users struct {
Create UserCreate `envconfig:"DRONE_USER_CREATE"`
Filter []string `envconfig:"DRONE_USER_FILTER"`
MinAge time.Duration `envconfig:"DRONE_MIN_AGE"`
}
// Webhook provides the webhook configuration.
Webhook struct {
Events []string `envconfig:"DRONE_WEBHOOK_EVENTS"`
Endpoint []string `envconfig:"DRONE_WEBHOOK_ENDPOINT"`
Secret string `envconfig:"DRONE_WEBHOOK_SECRET"`
SkipVerify bool `envconfig:"DRONE_WEBHOOK_SKIP_VERIFY"`
}
// Yaml provides the yaml webhook configuration.
Yaml struct {
Endpoint string `envconfig:"DRONE_YAML_ENDPOINT"`
Secret string `envconfig:"DRONE_YAML_SECRET"`
SkipVerify bool `envconfig:"DRONE_YAML_SKIP_VERIFY"`
}
// Convert provides the converter webhook configuration.
Convert struct {
Extension string `envconfig:"DRONE_CONVERT_PLUGIN_EXTENSION"`
Endpoint string `envconfig:"DRONE_CONVERT_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_CONVERT_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_CONVERT_PLUGIN_SKIP_VERIFY"`
}
// Validate provides the validation webhook configuration.
Validate struct {
Endpoint string `envconfig:"DRONE_VALIDATE_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_VALIDATE_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_VALIDATE_PLUGIN_SKIP_VERIFY"`
}
//
// Source code management.
//
// Bitbucket provides the bitbucket client configuration.
Bitbucket struct {
ClientID string `envconfig:"DRONE_BITBUCKET_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_BITBUCKET_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_BITBUCKET_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_BITBUCKET_DEBUG"`
}
// Gitea provides the gitea client configuration.
Gitea struct {
Server string `envconfig:"DRONE_GITEA_SERVER"`
ClientID string `envconfig:"DRONE_GITEA_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITEA_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITEA_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITEA_SCOPE" default:"repo,repo:status,user:email,read:org"`
Debug bool `envconfig:"DRONE_GITEA_DEBUG"`
}
// Github provides the github client configuration.
Github struct {
Server string `envconfig:"DRONE_GITHUB_SERVER" default:"https://github.com"`
APIServer string `envconfig:"DRONE_GITHUB_API_SERVER"`
ClientID string `envconfig:"DRONE_GITHUB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITHUB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITHUB_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITHUB_SCOPE" default:"repo,repo:status,user:email,read:org"`
RateLimit int `envconfig:"DRONE_GITHUB_USER_RATELIMIT"`
Debug bool `envconfig:"DRONE_GITHUB_DEBUG"`
}
// GitLab provides the gitlab client configuration.
GitLab struct {
Server string `envconfig:"DRONE_GITLAB_SERVER" default:"https://gitlab.com"`
ClientID string `envconfig:"DRONE_GITLAB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITLAB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITLAB_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GITLAB_DEBUG"`
}
// Gogs provides the gogs client configuration.
Gogs struct {
Server string `envconfig:"DRONE_GOGS_SERVER"`
SkipVerify bool `envconfig:"DRONE_GOGS_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GOGS_DEBUG"`
}
// Stash provides the stash client configuration.
Stash struct {
Server string `envconfig:"DRONE_STASH_SERVER"`
ConsumerKey string `envconfig:"DRONE_STASH_CONSUMER_KEY"`
ConsumerSecret string `envconfig:"DRONE_STASH_CONSUMER_SECRET"`
PrivateKey string `envconfig:"DRONE_STASH_PRIVATE_KEY"`
SkipVerify bool `envconfig:"DRONE_STASH_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_STASH_DEBUG"`
}
// S3 provides the storage configuration.
S3 struct {
Bucket string `envconfig:"DRONE_S3_BUCKET"`
Prefix string `envconfig:"DRONE_S3_PREFIX"`
Endpoint string `envconfig:"DRONE_S3_ENDPOINT"`
PathStyle bool `envconfig:"DRONE_S3_PATH_STYLE"`
}
// AzureBlob provides the storage configuration.
AzureBlob struct {
ContainerName string `envconfig:"DRONE_AZURE_BLOB_CONTAINER_NAME"`
StorageAccountName string `envconfig:"DRONE_AZURE_STORAGE_ACCOUNT_NAME"`
StorageAccessKey string `envconfig:"DRONE_AZURE_STORAGE_ACCESS_KEY"`
}
// HTTP provides http configuration.
HTTP struct {
AllowedHosts []string `envconfig:"DRONE_HTTP_ALLOWED_HOSTS"`
HostsProxyHeaders []string `envconfig:"DRONE_HTTP_PROXY_HEADERS"`
SSLRedirect bool `envconfig:"DRONE_HTTP_SSL_REDIRECT"`
SSLTemporaryRedirect bool `envconfig:"DRONE_HTTP_SSL_TEMPORARY_REDIRECT"`
SSLHost string `envconfig:"DRONE_HTTP_SSL_HOST"`
SSLProxyHeaders map[string]string `envconfig:"DRONE_HTTP_SSL_PROXY_HEADERS"`
STSSeconds int64 `envconfig:"DRONE_HTTP_STS_SECONDS"`
STSIncludeSubdomains bool `envconfig:"DRONE_HTTP_STS_INCLUDE_SUBDOMAINS"`
STSPreload bool `envconfig:"DRONE_HTTP_STS_PRELOAD"`
ForceSTSHeader bool `envconfig:"DRONE_HTTP_STS_FORCE_HEADER"`
BrowserXSSFilter bool `envconfig:"DRONE_HTTP_BROWSER_XSS_FILTER" default:"true"`
FrameDeny bool `envconfig:"DRONE_HTTP_FRAME_DENY" default:"true"`
ContentTypeNosniff bool `envconfig:"DRONE_HTTP_CONTENT_TYPE_NO_SNIFF"`
ContentSecurityPolicy string `envconfig:"DRONE_HTTP_CONTENT_SECURITY_POLICY"`
ReferrerPolicy string `envconfig:"DRONE_HTTP_REFERRER_POLICY"`
}
)
// Environ returns the settings from the environment.
func Environ() (Config, error) {
cfg := Config{}
err := envconfig.Process("", &cfg)
defaultAddress(&cfg)
defaultProxy(&cfg)
defaultRunner(&cfg)
defaultSession(&cfg)
defaultCallback(&cfg)
configureGithub(&cfg)
if err := kubernetesServiceConflict(&cfg); err != nil {
return cfg, err
}
return cfg, err
}
// String returns the configuration in string format.
func (c *Config) String() string {
out, _ := yaml.Marshal(c)
return string(out)
}
// IsGitHub returns true if the GitHub integration
// is activated.
func (c *Config) | IsGitHub | identifier_name |
|
config.go | Secret string `envconfig:"DRONE_WEBHOOK_SECRET"`
SkipVerify bool `envconfig:"DRONE_WEBHOOK_SKIP_VERIFY"`
}
// Yaml provides the yaml webhook configuration.
Yaml struct {
Endpoint string `envconfig:"DRONE_YAML_ENDPOINT"`
Secret string `envconfig:"DRONE_YAML_SECRET"`
SkipVerify bool `envconfig:"DRONE_YAML_SKIP_VERIFY"`
}
// Convert provides the converter webhook configuration.
Convert struct {
Extension string `envconfig:"DRONE_CONVERT_PLUGIN_EXTENSION"`
Endpoint string `envconfig:"DRONE_CONVERT_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_CONVERT_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_CONVERT_PLUGIN_SKIP_VERIFY"`
}
// Validate provides the validation webhook configuration.
Validate struct {
Endpoint string `envconfig:"DRONE_VALIDATE_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_VALIDATE_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_VALIDATE_PLUGIN_SKIP_VERIFY"`
}
//
// Source code management.
//
// Bitbucket provides the bitbucket client configuration.
Bitbucket struct {
ClientID string `envconfig:"DRONE_BITBUCKET_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_BITBUCKET_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_BITBUCKET_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_BITBUCKET_DEBUG"`
}
// Gitea provides the gitea client configuration.
Gitea struct {
Server string `envconfig:"DRONE_GITEA_SERVER"`
ClientID string `envconfig:"DRONE_GITEA_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITEA_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITEA_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITEA_SCOPE" default:"repo,repo:status,user:email,read:org"`
Debug bool `envconfig:"DRONE_GITEA_DEBUG"`
}
// Github provides the github client configuration.
Github struct {
Server string `envconfig:"DRONE_GITHUB_SERVER" default:"https://github.com"`
APIServer string `envconfig:"DRONE_GITHUB_API_SERVER"`
ClientID string `envconfig:"DRONE_GITHUB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITHUB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITHUB_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITHUB_SCOPE" default:"repo,repo:status,user:email,read:org"`
RateLimit int `envconfig:"DRONE_GITHUB_USER_RATELIMIT"`
Debug bool `envconfig:"DRONE_GITHUB_DEBUG"`
}
// GitLab provides the gitlab client configuration.
GitLab struct {
Server string `envconfig:"DRONE_GITLAB_SERVER" default:"https://gitlab.com"`
ClientID string `envconfig:"DRONE_GITLAB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITLAB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITLAB_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GITLAB_DEBUG"`
}
// Gogs provides the gogs client configuration.
Gogs struct {
Server string `envconfig:"DRONE_GOGS_SERVER"`
SkipVerify bool `envconfig:"DRONE_GOGS_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GOGS_DEBUG"`
}
// Stash provides the stash client configuration.
Stash struct {
Server string `envconfig:"DRONE_STASH_SERVER"`
ConsumerKey string `envconfig:"DRONE_STASH_CONSUMER_KEY"`
ConsumerSecret string `envconfig:"DRONE_STASH_CONSUMER_SECRET"`
PrivateKey string `envconfig:"DRONE_STASH_PRIVATE_KEY"`
SkipVerify bool `envconfig:"DRONE_STASH_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_STASH_DEBUG"`
}
// S3 provides the storage configuration.
S3 struct {
Bucket string `envconfig:"DRONE_S3_BUCKET"`
Prefix string `envconfig:"DRONE_S3_PREFIX"`
Endpoint string `envconfig:"DRONE_S3_ENDPOINT"`
PathStyle bool `envconfig:"DRONE_S3_PATH_STYLE"`
}
// AzureBlob provides the storage configuration.
AzureBlob struct {
ContainerName string `envconfig:"DRONE_AZURE_BLOB_CONTAINER_NAME"`
StorageAccountName string `envconfig:"DRONE_AZURE_STORAGE_ACCOUNT_NAME"`
StorageAccessKey string `envconfig:"DRONE_AZURE_STORAGE_ACCESS_KEY"`
}
// HTTP provides http configuration.
HTTP struct {
AllowedHosts []string `envconfig:"DRONE_HTTP_ALLOWED_HOSTS"`
HostsProxyHeaders []string `envconfig:"DRONE_HTTP_PROXY_HEADERS"`
SSLRedirect bool `envconfig:"DRONE_HTTP_SSL_REDIRECT"`
SSLTemporaryRedirect bool `envconfig:"DRONE_HTTP_SSL_TEMPORARY_REDIRECT"`
SSLHost string `envconfig:"DRONE_HTTP_SSL_HOST"`
SSLProxyHeaders map[string]string `envconfig:"DRONE_HTTP_SSL_PROXY_HEADERS"`
STSSeconds int64 `envconfig:"DRONE_HTTP_STS_SECONDS"`
STSIncludeSubdomains bool `envconfig:"DRONE_HTTP_STS_INCLUDE_SUBDOMAINS"`
STSPreload bool `envconfig:"DRONE_HTTP_STS_PRELOAD"`
ForceSTSHeader bool `envconfig:"DRONE_HTTP_STS_FORCE_HEADER"`
BrowserXSSFilter bool `envconfig:"DRONE_HTTP_BROWSER_XSS_FILTER" default:"true"`
FrameDeny bool `envconfig:"DRONE_HTTP_FRAME_DENY" default:"true"`
ContentTypeNosniff bool `envconfig:"DRONE_HTTP_CONTENT_TYPE_NO_SNIFF"`
ContentSecurityPolicy string `envconfig:"DRONE_HTTP_CONTENT_SECURITY_POLICY"`
ReferrerPolicy string `envconfig:"DRONE_HTTP_REFERRER_POLICY"`
}
)
// Environ returns the settings from the environment.
func Environ() (Config, error) {
cfg := Config{}
err := envconfig.Process("", &cfg)
defaultAddress(&cfg)
defaultProxy(&cfg)
defaultRunner(&cfg)
defaultSession(&cfg)
defaultCallback(&cfg)
configureGithub(&cfg)
if err := kubernetesServiceConflict(&cfg); err != nil {
return cfg, err
}
return cfg, err
}
// String returns the configuration in string format.
func (c *Config) String() string {
out, _ := yaml.Marshal(c)
return string(out)
}
// IsGitHub returns true if the GitHub integration
// is activated.
func (c *Config) IsGitHub() bool {
return c.Github.ClientID != ""
}
// IsGitHubEnterprise returns true if the GitHub
// integration is activated.
func (c *Config) IsGitHubEnterprise() bool {
return c.IsGitHub() && !strings.HasPrefix(c.Github.Server, "https://github.com")
}
// IsGitLab returns true if the GitLab integration
// is activated.
func (c *Config) IsGitLab() bool {
return c.GitLab.ClientID != ""
}
// IsGogs returns true if the Gogs integration
// is activated.
func (c *Config) IsGogs() bool {
return c.Gogs.Server != ""
}
// IsGitea returns true if the Gitea integration
// is activated.
func (c *Config) IsGitea() bool {
return c.Gitea.Server != ""
}
// IsBitbucket returns true if the Bitbucket Cloud
// integration is activated.
func (c *Config) IsBitbucket() bool {
return c.Bitbucket.ClientID != ""
}
// IsStash returns true if the Atlassian Stash
// integration is activated.
func (c *Config) IsStash() bool {
return c.Stash.Server != ""
}
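// defaultAddress switches the server to https on port 443 when TLS material or ACME
// is configured, and derives the externally visible address from proto and host.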
func defaultAddress(c *Config) {
if c.Server.Key != "" || c.Server.Cert != "" || c.Server.Acme {
c.Server.Port = ":443"
c.Server.Proto = "https"
}
c.Server.Addr = c.Server.Proto + "://" + c.Server.Host
}
func defaultProxy(c *Config) {
if c.Proxy.Host == "" {
c.Proxy.Host = c.Server.Host
}
if c.Proxy.Proto == "" {
c.Proxy.Proto = c.Server.Proto
}
c.Proxy.Addr = c.Proxy.Proto + "://" + c.Proxy.Host
}
func defaultCallback(c *Config) {
if c.RPC.Host == "" {
c.RPC.Host = c.Server.Host
}
if c.RPC.Proto == "" {
c.RPC.Proto = c.Server.Proto
}
}
func defaultRunner(c *Config) {
if c.Runner.Machine == "" {
c.Runner.Machine = hostname
}
parts := strings.Split(c.Runner.Platform, "/")
if len(parts) == 2 && c.Runner.OS == "" | {
c.Runner.OS = parts[0]
} | conditional_block |
|
config.go |
Jsonnet struct {
Enabled bool `envconfig:"DRONE_JSONNET_ENABLED"`
}
// Kubernetes provides kubernetes configuration
Kubernetes struct {
Enabled bool `envconfig:"DRONE_KUBERNETES_ENABLED"`
Namespace string `envconfig:"DRONE_KUBERNETES_NAMESPACE"`
Path string `envconfig:"DRONE_KUBERNETES_CONFIG_PATH"`
URL string `envconfig:"DRONE_KUBERNETES_CONFIG_URL"`
TTL int `envconfig:"DRONE_KUBERNETES_TTL_AFTER_FINISHED" default:"300"`
ServiceAccountName string `envconfig:"DRONE_KUBERNETES_SERVICE_ACCOUNT"`
PullPolicy string `envconfig:"DRONE_KUBERNETES_IMAGE_PULL" default:"Always"`
Image string `envconfig:"DRONE_KUBERNETES_IMAGE"`
}
// Nomad configuration.
Nomad struct {
Enabled bool `envconfig:"DRONE_NOMAD_ENABLED"`
Datacenters []string `envconfig:"DRONE_NOMAD_DATACENTER" default:"dc1"`
Namespace string `envconfig:"DRONE_NOMAD_NAMESPACE"`
Region string `envconfig:"DRONE_NOMAD_REGION"`
Prefix string `envconfig:"DRONE_NOMAD_JOB_PREFIX" default:"drone-job-"`
Image string `envconfig:"DRONE_NOMAD_IMAGE"`
ImagePull bool `envconfig:"DRONE_NOMAD_IMAGE_PULL"`
Memory int `envconfig:"DRONE_NOMAD_DEFAULT_RAM" default:"1024"`
Labels map[string]string `envconfig:"DRONE_NOMAD_LABELS"`
CPU int `envconfig:"DRONE_NOMAD_DEFAULT_CPU" default:"500"`
}
// License provides license configuration
License struct {
Key string `envconfig:"DRONE_LICENSE"`
Endpoint string `envconfig:"DRONE_LICENSE_ENDPOINT"`
}
// Logging provides the logging configuration.
Logging struct {
Debug bool `envconfig:"DRONE_LOGS_DEBUG"`
Trace bool `envconfig:"DRONE_LOGS_TRACE"`
Color bool `envconfig:"DRONE_LOGS_COLOR"`
Pretty bool `envconfig:"DRONE_LOGS_PRETTY"`
Text bool `envconfig:"DRONE_LOGS_TEXT"`
}
// Prometheus provides the prometheus configuration.
Prometheus struct {
EnableAnonymousAccess bool `envconfig:"DRONE_PROMETHEUS_ANONYMOUS_ACCESS" default:"false"`
}
// Repository provides the repository configuration.
Repository struct {
Filter []string `envconfig:"DRONE_REPOSITORY_FILTER"`
Visibility string `envconfig:"DRONE_REPOSITORY_VISIBILITY"`
Trusted bool `envconfig:"DRONE_REPOSITORY_TRUSTED"`
}
// Registries provides the registry configuration.
Registries struct {
Endpoint string `envconfig:"DRONE_REGISTRY_ENDPOINT"`
Password string `envconfig:"DRONE_REGISTRY_SECRET"`
SkipVerify bool `envconfig:"DRONE_REGISTRY_SKIP_VERIFY"`
}
// Secrets provides the secret configuration.
Secrets struct {
Endpoint string `envconfig:"DRONE_SECRET_ENDPOINT"`
Password string `envconfig:"DRONE_SECRET_SECRET"`
SkipVerify bool `envconfig:"DRONE_SECRET_SKIP_VERIFY"`
}
// RPC provides the rpc configuration.
RPC struct {
Server string `envconfig:"DRONE_RPC_SERVER"` | }
Agent struct {
Disabled bool `envconfig:"DRONE_AGENTS_DISABLED"`
}
// Runner provides the runner configuration.
Runner struct {
Local bool `envconfig:"DRONE_RUNNER_LOCAL"`
Image string `envconfig:"DRONE_RUNNER_IMAGE" default:"drone/controller:1"`
Platform string `envconfig:"DRONE_RUNNER_PLATFORM" default:"linux/amd64"`
OS string `envconfig:"DRONE_RUNNER_OS"`
Arch string `envconfig:"DRONE_RUNNER_ARCH"`
Kernel string `envconfig:"DRONE_RUNNER_KERNEL"`
Variant string `envconfig:"DRONE_RUNNER_VARIANT"`
Machine string `envconfig:"DRONE_RUNNER_NAME"`
Capacity int `envconfig:"DRONE_RUNNER_CAPACITY" default:"2"`
Labels map[string]string `envconfig:"DRONE_RUNNER_LABELS"`
Volumes []string `envconfig:"DRONE_RUNNER_VOLUMES"`
Networks []string `envconfig:"DRONE_RUNNER_NETWORKS"`
Devices []string `envconfig:"DRONE_RUNNER_DEVICES"`
Privileged []string `envconfig:"DRONE_RUNNER_PRIVILEGED_IMAGES"`
Environ map[string]string `envconfig:"DRONE_RUNNER_ENVIRON"`
Limits struct {
MemSwapLimit Bytes `envconfig:"DRONE_LIMIT_MEM_SWAP"`
MemLimit Bytes `envconfig:"DRONE_LIMIT_MEM"`
ShmSize Bytes `envconfig:"DRONE_LIMIT_SHM_SIZE"`
CPUQuota int64 `envconfig:"DRONE_LIMIT_CPU_QUOTA"`
CPUShares int64 `envconfig:"DRONE_LIMIT_CPU_SHARES"`
CPUSet string `envconfig:"DRONE_LIMIT_CPU_SET"`
}
}
// Server provides the server configuration.
Server struct {
Addr string `envconfig:"-"`
Host string `envconfig:"DRONE_SERVER_HOST" default:"localhost:8080"`
Port string `envconfig:"DRONE_SERVER_PORT" default:":8080"`
Proto string `envconfig:"DRONE_SERVER_PROTO" default:"http"`
Pprof bool `envconfig:"DRONE_PPROF_ENABLED"`
Acme bool `envconfig:"DRONE_TLS_AUTOCERT"`
Email string `envconfig:"DRONE_TLS_EMAIL"`
Cert string `envconfig:"DRONE_TLS_CERT"`
Key string `envconfig:"DRONE_TLS_KEY"`
}
// Proxy provides proxy server configuration.
Proxy struct {
Addr string `envconfig:"-"`
Host string `envconfig:"DRONE_SERVER_PROXY_HOST"`
Proto string `envconfig:"DRONE_SERVER_PROXY_PROTO"`
}
// Registration configuration.
Registration struct {
Closed bool `envconfig:"DRONE_REGISTRATION_CLOSED"`
}
// Authentication Controller configuration
Authentication struct {
Endpoint string `envconfig:"DRONE_ADMISSION_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_ADMISSION_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_ADMISSION_PLUGIN_SKIP_VERIFY"`
}
// Session provides the session configuration.
Session struct {
Timeout time.Duration `envconfig:"DRONE_COOKIE_TIMEOUT" default:"720h"`
Secret string `envconfig:"DRONE_COOKIE_SECRET"`
Secure bool `envconfig:"DRONE_COOKIE_SECURE"`
MappingFile string `envconfig:"DRONE_LEGACY_TOKEN_MAPPING_FILE"`
}
// Status provides status configurations.
Status struct {
Disabled bool `envconfig:"DRONE_STATUS_DISABLED"`
Name string `envconfig:"DRONE_STATUS_NAME"`
}
// Users provides the user configuration.
Users struct {
Create UserCreate `envconfig:"DRONE_USER_CREATE"`
Filter []string `envconfig:"DRONE_USER_FILTER"`
MinAge time.Duration `envconfig:"DRONE_MIN_AGE"`
}
// Webhook provides the webhook configuration.
Webhook struct {
Events []string `envconfig:"DRONE_WEBHOOK_EVENTS"`
Endpoint []string `envconfig:"DRONE_WEBHOOK_ENDPOINT"`
Secret string `envconfig:"DRONE_WEBHOOK_SECRET"`
SkipVerify bool `envconfig:"DRONE_WEBHOOK_SKIP_VERIFY"`
}
// Yaml provides the yaml webhook configuration.
Yaml struct {
Endpoint string `envconfig:"DRONE_YAML_ENDPOINT"`
Secret string `envconfig:"DRONE_YAML_SECRET"`
SkipVerify bool `envconfig:"DRONE_YAML_SKIP_VERIFY"`
}
// Convert provides the converter webhook configuration.
Convert struct {
Extension string `envconfig:"DRONE_CONVERT_PLUGIN_EXTENSION"`
Endpoint string `envconfig:"DRONE_CONVERT_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_CONVERT_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE_CONVERT_PLUGIN_SKIP_VERIFY"`
}
// Validate provides the validation webhook configuration.
Validate struct {
Endpoint string `envconfig:"DRONE_VALIDATE_PLUGIN_ENDPOINT"`
Secret string `envconfig:"DRONE_VALIDATE_PLUGIN_SECRET"`
SkipVerify bool `envconfig:"DRONE | Secret string `envconfig:"DRONE_RPC_SECRET"`
Debug bool `envconfig:"DRONE_RPC_DEBUG"`
Host string `envconfig:"DRONE_RPC_HOST"`
Proto string `envconfig:"DRONE_RPC_PROTO"`
// Hosts map[string]string `envconfig:"DRONE_RPC_EXTRA_HOSTS"` | random_line_split |
config.go | gitea client configuration.
Gitea struct {
Server string `envconfig:"DRONE_GITEA_SERVER"`
ClientID string `envconfig:"DRONE_GITEA_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITEA_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITEA_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITEA_SCOPE" default:"repo,repo:status,user:email,read:org"`
Debug bool `envconfig:"DRONE_GITEA_DEBUG"`
}
// Github provides the github client configuration.
Github struct {
Server string `envconfig:"DRONE_GITHUB_SERVER" default:"https://github.com"`
APIServer string `envconfig:"DRONE_GITHUB_API_SERVER"`
ClientID string `envconfig:"DRONE_GITHUB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITHUB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITHUB_SKIP_VERIFY"`
Scope []string `envconfig:"DRONE_GITHUB_SCOPE" default:"repo,repo:status,user:email,read:org"`
RateLimit int `envconfig:"DRONE_GITHUB_USER_RATELIMIT"`
Debug bool `envconfig:"DRONE_GITHUB_DEBUG"`
}
// GitLab provides the gitlab client configuration.
GitLab struct {
Server string `envconfig:"DRONE_GITLAB_SERVER" default:"https://gitlab.com"`
ClientID string `envconfig:"DRONE_GITLAB_CLIENT_ID"`
ClientSecret string `envconfig:"DRONE_GITLAB_CLIENT_SECRET"`
SkipVerify bool `envconfig:"DRONE_GITLAB_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GITLAB_DEBUG"`
}
// Gogs provides the gogs client configuration.
Gogs struct {
Server string `envconfig:"DRONE_GOGS_SERVER"`
SkipVerify bool `envconfig:"DRONE_GOGS_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_GOGS_DEBUG"`
}
// Stash provides the stash client configuration.
Stash struct {
Server string `envconfig:"DRONE_STASH_SERVER"`
ConsumerKey string `envconfig:"DRONE_STASH_CONSUMER_KEY"`
ConsumerSecret string `envconfig:"DRONE_STASH_CONSUMER_SECRET"`
PrivateKey string `envconfig:"DRONE_STASH_PRIVATE_KEY"`
SkipVerify bool `envconfig:"DRONE_STASH_SKIP_VERIFY"`
Debug bool `envconfig:"DRONE_STASH_DEBUG"`
}
// S3 provides the storage configuration.
S3 struct {
Bucket string `envconfig:"DRONE_S3_BUCKET"`
Prefix string `envconfig:"DRONE_S3_PREFIX"`
Endpoint string `envconfig:"DRONE_S3_ENDPOINT"`
PathStyle bool `envconfig:"DRONE_S3_PATH_STYLE"`
}
// AzureBlob provides the storage configuration.
AzureBlob struct {
ContainerName string `envconfig:"DRONE_AZURE_BLOB_CONTAINER_NAME"`
StorageAccountName string `envconfig:"DRONE_AZURE_STORAGE_ACCOUNT_NAME"`
StorageAccessKey string `envconfig:"DRONE_AZURE_STORAGE_ACCESS_KEY"`
}
// HTTP provides http configuration.
HTTP struct {
AllowedHosts []string `envconfig:"DRONE_HTTP_ALLOWED_HOSTS"`
HostsProxyHeaders []string `envconfig:"DRONE_HTTP_PROXY_HEADERS"`
SSLRedirect bool `envconfig:"DRONE_HTTP_SSL_REDIRECT"`
SSLTemporaryRedirect bool `envconfig:"DRONE_HTTP_SSL_TEMPORARY_REDIRECT"`
SSLHost string `envconfig:"DRONE_HTTP_SSL_HOST"`
SSLProxyHeaders map[string]string `envconfig:"DRONE_HTTP_SSL_PROXY_HEADERS"`
STSSeconds int64 `envconfig:"DRONE_HTTP_STS_SECONDS"`
STSIncludeSubdomains bool `envconfig:"DRONE_HTTP_STS_INCLUDE_SUBDOMAINS"`
STSPreload bool `envconfig:"DRONE_HTTP_STS_PRELOAD"`
ForceSTSHeader bool `envconfig:"DRONE_HTTP_STS_FORCE_HEADER"`
BrowserXSSFilter bool `envconfig:"DRONE_HTTP_BROWSER_XSS_FILTER" default:"true"`
FrameDeny bool `envconfig:"DRONE_HTTP_FRAME_DENY" default:"true"`
ContentTypeNosniff bool `envconfig:"DRONE_HTTP_CONTENT_TYPE_NO_SNIFF"`
ContentSecurityPolicy string `envconfig:"DRONE_HTTP_CONTENT_SECURITY_POLICY"`
ReferrerPolicy string `envconfig:"DRONE_HTTP_REFERRER_POLICY"`
}
)
// Environ returns the settings from the environment.
func Environ() (Config, error) {
cfg := Config{}
err := envconfig.Process("", &cfg)
defaultAddress(&cfg)
defaultProxy(&cfg)
defaultRunner(&cfg)
defaultSession(&cfg)
defaultCallback(&cfg)
configureGithub(&cfg)
if err := kubernetesServiceConflict(&cfg); err != nil {
return cfg, err
}
return cfg, err
}
// String returns the configuration in string format.
func (c *Config) String() string {
out, _ := yaml.Marshal(c)
return string(out)
}
// IsGitHub returns true if the GitHub integration
// is activated.
func (c *Config) IsGitHub() bool {
return c.Github.ClientID != ""
}
// IsGitHubEnterprise returns true if the GitHub
// integration is activated.
func (c *Config) IsGitHubEnterprise() bool {
return c.IsGitHub() && !strings.HasPrefix(c.Github.Server, "https://github.com")
}
// IsGitLab returns true if the GitLab integration
// is activated.
func (c *Config) IsGitLab() bool {
return c.GitLab.ClientID != ""
}
// IsGogs returns true if the Gogs integration
// is activated.
func (c *Config) IsGogs() bool {
return c.Gogs.Server != ""
}
// IsGitea returns true if the Gitea integration
// is activated.
func (c *Config) IsGitea() bool {
return c.Gitea.Server != ""
}
// IsBitbucket returns true if the Bitbucket Cloud
// integration is activated.
func (c *Config) IsBitbucket() bool {
return c.Bitbucket.ClientID != ""
}
// IsStash returns true if the Atlassian Stash
// integration is activated.
func (c *Config) IsStash() bool {
return c.Stash.Server != ""
}
func defaultAddress(c *Config) {
if c.Server.Key != "" || c.Server.Cert != "" || c.Server.Acme {
c.Server.Port = ":443"
c.Server.Proto = "https"
}
c.Server.Addr = c.Server.Proto + "://" + c.Server.Host
}
func defaultProxy(c *Config) {
if c.Proxy.Host == "" {
c.Proxy.Host = c.Server.Host
}
if c.Proxy.Proto == "" {
c.Proxy.Proto = c.Server.Proto
}
c.Proxy.Addr = c.Proxy.Proto + "://" + c.Proxy.Host
}
func defaultCallback(c *Config) {
if c.RPC.Host == "" {
c.RPC.Host = c.Server.Host
}
if c.RPC.Proto == "" {
c.RPC.Proto = c.Server.Proto
}
}
func defaultRunner(c *Config) {
if c.Runner.Machine == "" {
c.Runner.Machine = hostname
}
parts := strings.Split(c.Runner.Platform, "/")
if len(parts) == 2 && c.Runner.OS == "" {
c.Runner.OS = parts[0]
}
if len(parts) == 2 && c.Runner.Arch == "" {
c.Runner.Arch = parts[1]
}
}
func defaultSession(c *Config) {
if c.Session.Secret == "" {
c.Session.Secret = uniuri.NewLen(32)
}
}
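// configureGithub fills in the GitHub API endpoint: api.github.com for github.com,
// otherwise the /api/v3 path of the GitHub Enterprise server.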
func configureGithub(c *Config) {
if c.Github.APIServer != "" {
return
}
if c.Github.Server == "https://github.com" {
c.Github.APIServer = "https://api.github.com"
} else {
c.Github.APIServer = strings.TrimSuffix(c.Github.Server, "/") + "/api/v3"
}
}
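// kubernetesServiceConflict rejects a port value of the form tcp://..., which
// typically means Kubernetes service discovery has overwritten DRONE_SERVER_PORT.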
func kubernetesServiceConflict(c *Config) error {
if strings.HasPrefix(c.Server.Port, "tcp://") {
return errors.New("Invalid port configuration. See https://discourse.drone.io/t/drone-server-changing-ports-protocol/4144")
}
return nil
}
// Bytes stores a number of bytes (e.g. megabytes).
type Bytes int64
// Decode implements a decoder that parses a string representation
// of bytes into the number of bytes it represents.
func (b *Bytes) Decode(value string) error {
v, err := humanize.ParseBytes(value)
*b = Bytes(v)
return err
}
// Int64 returns the int64 value of the Byte.
func (b *Bytes) Int64() int64 {
return int64(*b)
}
// String returns the string value of the Byte.
func (b *Bytes) String() string | {
return fmt.Sprint(*b)
} | identifier_body |
|
runner.rs | map(comn::TickNum)
.collect();
if crossed_tick_nums.len() > MAX_TICKS_PER_UPDATE {
// It's possible that we have a large jump in ticks, e.g. due to a
// lag spike, or because we are running in a background tab. In this
// case, we don't want to overload ourselves by sending many input
// packets and performing prediction over many ticks. Instead, we
// just jump directly to the last couple of ticks.
info!("Crossed {} ticks, will skip", crossed_tick_nums.len());
// TODO: In order to nicely reinitialize prediction, we should take
// those crossed ticks for which we actually received a server
// state...
crossed_tick_nums.drain(0..crossed_tick_nums.len() - MAX_TICKS_PER_UPDATE);
assert!(crossed_tick_nums.len() == MAX_TICKS_PER_UPDATE);
}
// Iterate over all the ticks that we have crossed, also including
// those for which we did not receive anything from the server.
let mut events = Vec::new();
for tick_num in crossed_tick_nums.iter() {
coarse_prof::profile!("tick");
// For debugging, keep track of the ticks for which we did not
// receive server data on time.
if self.received_states.get(tick_num).is_some() {
self.stats.skip_loss.record_received(tick_num.0 as usize);
}
// Start server events of crossed ticks.
if let Some(tick_events) = self.received_events.get(tick_num) {
events.extend(tick_events.clone().into_iter());
self.received_events.remove(tick_num);
}
// Send inputs for server ticks we cross.
self.last_inputs.push_back((*tick_num, input.clone()));
while self.last_inputs.len() > comn::MAX_INPUTS_PER_MESSAGE {
self.last_inputs.pop_front();
}
self.send(comn::ClientMessage::Input(
self.last_inputs.iter().cloned().collect(),
));
// Predict effects of our own input locally.
if let Some(prediction) = self.prediction.as_mut() {
coarse_prof::profile!("predict");
prediction.record_tick_input(
*tick_num,
input.clone(),
self.received_states.get(tick_num),
);
}
}
coarse_prof::profile!("cleanup");
if self.next_tick_num <= Some(self.tick_num()) {
// We have reached the tick that we were interpolating into, so
// we'll need to look for the next interpolation target.
self.next_tick_num = None;
}
// Do we have a tick to interpolate into ready?
if self.next_tick_num.is_none() {
let min_ready_num = self.received_states.keys().find(|tick_num| {
**tick_num > self.tick_num() && tick_num.0 - self.tick_num().0 <= 3
});
if let Some(min_ready_num) = min_ready_num {
self.next_tick_num = Some(*min_ready_num);
}
}
// Remove events for older ticks; we will no longer need them. Note,
// however, that the same cannot be said about the received states,
// since we may still need them as the basis for delta decoding.
// Received states are only pruned when we receive new states.
{
let remove_tick_nums: Vec<comn::TickNum> = self
.received_events
.keys()
.copied()
.filter(|tick_num| *tick_num < self.tick_num())
.collect();
for tick_num in remove_tick_nums {
self.received_events.remove(&tick_num);
}
}
// Keep some statistics for debugging...
if let Some(recv_game_time) = recv_game_time {
self.stats
.time_lag_ms
.record((recv_game_time - self.interp_game_time) * 1000.0);
} else {
// We cannot estimate the server time, so we probably disconnected
// or just connected.
self.stats.time_lag_ms = stats::Var::default();
}
self.stats
.tick_interp
.record(self.next_tick_num.map_or(0.0, |next_tick_num| {
(next_tick_num.0 - self.tick_num().0) as f32
}));
self.stats
.time_warp_factor
.record(self.next_time_warp_factor);
self.stats.send_rate = self.webrtc_client.send_rate();
self.stats.recv_rate = self.webrtc_client.recv_rate();
self.stats.recv_delay_std_dev = self.recv_tick_time.recv_delay_std_dev().unwrap_or(-1.0);
events
}
// TODO: Both `state` and `next_entities` need to be revised
pub fn state(&self) -> Option<comn::Game> {
// Due to loss, we might not always have an authoritative state for the
// current tick num. Take the closest one then.
let mut state = self
.received_states
.iter()
.filter(|(tick_num, _)| **tick_num <= self.tick_num())
.next_back()
.map(|(_, state)| state.game.clone());
// When using prediction, overwrite the predicted entities in the
// authorative state.
if let Some(state) = state.as_mut() {
let predicted_entities = self
.prediction
.as_ref()
.and_then(|prediction| prediction.predicted_entities(self.tick_num()));
if let Some(predicted_entities) = predicted_entities {
state.entities.extend(
predicted_entities
.iter()
.map(|(entity_id, entity)| (*entity_id, entity.clone())),
);
}
}
state
}
pub fn next_entities(&self) -> BTreeMap<comn::EntityId, (comn::GameTime, comn::Entity)> {
let mut entities = BTreeMap::new();
// Add entities from the authoritative state, if available.
let next_state = self
.next_tick_num
.and_then(|key| self.received_states.get(&key).map(|value| (key, value)));
if let Some((recv_tick_num, recv_state)) = next_state {
let recv_game_time = self.settings.tick_game_time(recv_tick_num);
entities.extend(
recv_state
.game
.entities
.clone()
.into_iter()
.map(|(entity_id, entity)| (entity_id, (recv_game_time, entity))),
);
}
// Add entities from predicted state, if available. Note that, due to
// loss in ticks received from the server, these entities might live in
// a different time from the authoritative entities.
let predicted_entities = self
.prediction
.as_ref()
.and_then(|prediction| prediction.predicted_entities(self.tick_num().next()));
if let Some(predicted_entities) = predicted_entities {
let pred_game_time = self.settings.tick_game_time(self.tick_num().next());
entities.extend(
predicted_entities
.clone()
.into_iter()
.map(|(entity_id, entity)| (entity_id, (pred_game_time, entity))),
);
}
entities
}
fn handle_message(&mut self, recv_time: Instant, message: comn::ServerMessage) {
coarse_prof::profile!("handle_message");
match message {
comn::ServerMessage::Ping(_) => {
// Handled in on_message callback to get better ping
// estimates.
}
comn::ServerMessage::Pong(sequence_num) => {
if self.ping.record_pong(recv_time, sequence_num).is_err() {
debug!(
"Ignoring pong with invalid sequence number {:?}",
sequence_num
);
}
}
comn::ServerMessage::Tick(tick) => {
self.record_server_tick(recv_time, tick);
}
comn::ServerMessage::Disconnect => {
self.disconnected = true;
}
}
}
pub fn disconnect(&mut self) {
// Send unreliable message a few times to increase chance of arrival.
for _ in 0..3 {
self.send(comn::ClientMessage::Disconnect);
}
self.disconnected = true;
}
fn send(&self, message: comn::ClientMessage) {
coarse_prof::profile!("send");
let signed_message = comn::SignedClientMessage(self.my_token, message);
let data = signed_message.serialize();
coarse_prof::profile!("webrtc");
if let Err(err) = self.webrtc_client.send(&data) {
warn!("Failed to send message: {:?}", err);
}
}
fn record_server_tick(&mut self, recv_time: Instant, tick: comn::Tick) | {
let recv_tick_num = tick.diff.tick_num;
let recv_game_time = self.settings.tick_game_time(recv_tick_num);
// Keep some statistics for debugging...
self.stats.loss.record_received(recv_tick_num.0 as usize);
if let Some(my_last_input_num) = tick.your_last_input_num.as_ref() {
self.stats
.input_delay
.record((recv_tick_num.0 - my_last_input_num.0) as f32 - 1.0);
}
if recv_game_time < self.interp_game_time {
debug!(
"Ignoring old tick of time {} vs our interp_game_time={}",
recv_game_time, self.interp_game_time,
);
return;
}
| identifier_body |
|
runner.rs | ) {
self.send(comn::ClientMessage::Ping(sequence_num));
}
// Determine new local game time, making sure to stay behind the receive
// stream by our desired lag time. We do this so that we have ticks
// between which we can interpolate.
//
// If we are off too far from our lag target, slow down or speed up
// playback time.
let time_since_start = now.duration_since(self.start_time).as_secs_f32();
let recv_game_time = self.recv_tick_time.estimate(time_since_start);
let new_interp_game_time = if let Some(recv_game_time) = recv_game_time {
let current_time_lag = recv_game_time - (self.interp_game_time + dt.as_secs_f32());
let time_lag_deviation = self.target_time_lag() - current_time_lag;
self.stats
.time_lag_deviation_ms
.record(time_lag_deviation * 1000.0);
if time_lag_deviation.abs() < MAX_TIME_LAG_DEVIATION {
/*let k = 0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.05).exp());
if time_lag_deviation > 0.0 {
1.0 / k
} else {
k
}*/
//0.5 * ((-time_lag_deviation).tanh() + 2.0)
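// Logistic mapping of the deviation to a playback speed in (0.5, 2.0): slow down
// when we are running too close behind the server (deviation > 0), speed up when
// we have fallen too far behind, and play at exactly 1.0 at zero deviation.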
self.next_time_warp_factor =
0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.005).exp());
self.interp_game_time + self.next_time_warp_factor * dt.as_secs_f32()
} else {
// Our playback time is too far off, just jump directly to the
// target time.
let target_time = recv_game_time - self.target_time_lag();
info!(
"Time is off by {}, jumping to {}",
time_lag_deviation, target_time
);
target_time
}
} else {
// We have no knowledge of the tick receive time, probably didn't
// receive the first tick packet yet.
self.interp_game_time
};
// Don't let time run further than the ticks that we have received.
// This is here so that we stop local time if the server drops or
// starts lagging heavily.
let max_tick_num = self
.received_states
.keys()
.rev()
.next()
.copied()
.unwrap_or(comn::TickNum(0))
.max(self.tick_num())
.max(self.next_tick_num.unwrap_or(comn::TickNum(0)));
// Advance our playback time.
let prev_tick_num = self.tick_num();
self.interp_game_time =
new_interp_game_time.min(self.settings.tick_game_time(max_tick_num));
let new_tick_num = self.tick_num();
// Look at all the intermediate ticks. We will have one of the
// following cases:
//
// 1. In this update call, the tick number did not change, so
// `prev_tick_num == new_tick_num`.
// 2. We crossed one tick, e.g. prev_tick_num is 7 and new_tick_num is
// 8.
// 3. We crossed more than one tick. This should happen only on lag
// spikes, be it local or in the network.
let mut crossed_tick_nums: Vec<comn::TickNum> = (prev_tick_num.0 + 1..=new_tick_num.0)
.map(comn::TickNum)
.collect();
if crossed_tick_nums.len() > MAX_TICKS_PER_UPDATE {
// It's possible that we have a large jump in ticks, e.g. due to a
// lag spike, or because we are running in a background tab. In this
// case, we don't want to overload ourselves by sending many input
// packets and performing prediction over many ticks. Instead, we
// just jump directly to the last couple of ticks.
info!("Crossed {} ticks, will skip", crossed_tick_nums.len());
// TODO: In order to nicely reinitialize prediction, we should take
// those crossed ticks for which we actually received a server
// state...
crossed_tick_nums.drain(0..crossed_tick_nums.len() - MAX_TICKS_PER_UPDATE);
assert!(crossed_tick_nums.len() == MAX_TICKS_PER_UPDATE);
}
// Iterate over all the ticks that we have crossed, also including
// those for which we did not receive anything from the server.
let mut events = Vec::new();
for tick_num in crossed_tick_nums.iter() {
coarse_prof::profile!("tick");
// For debugging, keep track of the ticks for which we did not
// receive server data on time.
if self.received_states.get(tick_num).is_some() {
self.stats.skip_loss.record_received(tick_num.0 as usize);
}
// Start server events of crossed ticks.
if let Some(tick_events) = self.received_events.get(tick_num) {
events.extend(tick_events.clone().into_iter());
self.received_events.remove(tick_num);
}
// Send inputs for server ticks we cross.
self.last_inputs.push_back((*tick_num, input.clone()));
while self.last_inputs.len() > comn::MAX_INPUTS_PER_MESSAGE {
self.last_inputs.pop_front();
}
self.send(comn::ClientMessage::Input(
self.last_inputs.iter().cloned().collect(),
));
// Predict effects of our own input locally.
if let Some(prediction) = self.prediction.as_mut() {
coarse_prof::profile!("predict");
prediction.record_tick_input(
*tick_num,
input.clone(),
self.received_states.get(tick_num),
);
}
}
coarse_prof::profile!("cleanup");
if self.next_tick_num <= Some(self.tick_num()) {
// We have reached the tick that we were interpolating into, so
// we'll need to look for the next interpolation target.
self.next_tick_num = None;
}
// Do we have a tick to interpolate into ready?
if self.next_tick_num.is_none() {
let min_ready_num = self.received_states.keys().find(|tick_num| {
**tick_num > self.tick_num() && tick_num.0 - self.tick_num().0 <= 3
});
if let Some(min_ready_num) = min_ready_num {
self.next_tick_num = Some(*min_ready_num);
}
}
// Remove events for older ticks; we will no longer need them. Note,
// however, that the same cannot be said about the received states,
// since we may still need them as the basis for delta decoding.
// Received states are only pruned when we receive new states.
{
let remove_tick_nums: Vec<comn::TickNum> = self
.received_events
.keys()
.copied()
.filter(|tick_num| *tick_num < self.tick_num())
.collect();
for tick_num in remove_tick_nums {
self.received_events.remove(&tick_num);
}
}
// Keep some statistics for debugging...
if let Some(recv_game_time) = recv_game_time {
self.stats
.time_lag_ms
.record((recv_game_time - self.interp_game_time) * 1000.0);
} else {
// We cannot estimate the server time, so we probably disconnected
// or just connected.
self.stats.time_lag_ms = stats::Var::default();
}
self.stats
.tick_interp
.record(self.next_tick_num.map_or(0.0, |next_tick_num| {
(next_tick_num.0 - self.tick_num().0) as f32
}));
self.stats
.time_warp_factor
.record(self.next_time_warp_factor);
self.stats.send_rate = self.webrtc_client.send_rate();
self.stats.recv_rate = self.webrtc_client.recv_rate();
self.stats.recv_delay_std_dev = self.recv_tick_time.recv_delay_std_dev().unwrap_or(-1.0);
events
}
// TODO: Both `state` and `next_entities` need to be revised
pub fn state(&self) -> Option<comn::Game> {
// Due to loss, we might not always have an authorative state for the
// current tick num. Take the closest one then.
let mut state = self
.received_states
.iter()
.filter(|(tick_num, _)| **tick_num <= self.tick_num())
.next_back()
.map(|(_, state)| state.game.clone());
// When using prediction, overwrite the predicted entities in the
// authorative state.
if let Some(state) = state.as_mut() {
let predicted_entities = self
.prediction
.as_ref()
.and_then(|prediction| prediction.predicted_entities(self.tick_num()));
if let Some(predicted_entities) = predicted_entities {
state.entities.extend(
predicted_entities
.iter() | .map(|(entity_id, entity)| (*entity_id, entity.clone())),
);
} | random_line_split |
|
runner.rs | (&self) -> &Stats {
&self.stats
}
pub fn ping(&self) -> &PingEstimation {
&self.ping
}
pub fn interp_game_time(&self) -> comn::GameTime {
self.interp_game_time
}
fn target_time_lag(&self) -> comn::GameTime {
self.settings.tick_period() * 1.5
}
fn tick_num(&self) -> comn::TickNum {
comn::TickNum((self.interp_game_time / self.settings.tick_period()) as u32)
}
pub fn update(&mut self, now: Instant, dt: Duration, input: &comn::Input) -> Vec<comn::Event> {
assert!(self.is_good());
{
coarse_prof::profile!("webrtc");
self.webrtc_client.set_now((Instant::now(), now));
while let Some((recv_time, message)) = self.webrtc_client.take_message() {
self.handle_message(recv_time, message);
}
}
if let Some(sequence_num) = self.ping.next_ping_sequence_num(now) {
self.send(comn::ClientMessage::Ping(sequence_num));
}
// Determine new local game time, making sure to stay behind the receive
// stream by our desired lag time. We do this so that we have ticks
// between which we can interpolate.
//
// If we are off too far from our lag target, slow down or speed up
// playback time.
let time_since_start = now.duration_since(self.start_time).as_secs_f32();
let recv_game_time = self.recv_tick_time.estimate(time_since_start);
let new_interp_game_time = if let Some(recv_game_time) = recv_game_time {
let current_time_lag = recv_game_time - (self.interp_game_time + dt.as_secs_f32());
let time_lag_deviation = self.target_time_lag() - current_time_lag;
self.stats
.time_lag_deviation_ms
.record(time_lag_deviation * 1000.0);
if time_lag_deviation.abs() < MAX_TIME_LAG_DEVIATION {
/*let k = 0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.05).exp());
if time_lag_deviation > 0.0 {
1.0 / k
} else {
k
}*/
//0.5 * ((-time_lag_deviation).tanh() + 2.0)
self.next_time_warp_factor =
0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.005).exp());
self.interp_game_time + self.next_time_warp_factor * dt.as_secs_f32()
} else {
// Our playback time is too far off, just jump directly to the
// target time.
let target_time = recv_game_time - self.target_time_lag();
info!(
"Time is off by {}, jumping to {}",
time_lag_deviation, target_time
);
target_time
}
} else {
// We have no knowledge of the tick receive time, probably didn't
// receive the first tick packet yet.
self.interp_game_time
};
// Don't let time run further than the ticks that we have received.
// This is here so that we stop local time if the server drops or
// starts lagging heavily.
let max_tick_num = self
.received_states
.keys()
.rev()
.next()
.copied()
.unwrap_or(comn::TickNum(0))
.max(self.tick_num())
.max(self.next_tick_num.unwrap_or(comn::TickNum(0)));
// Advance our playback time.
let prev_tick_num = self.tick_num();
self.interp_game_time =
new_interp_game_time.min(self.settings.tick_game_time(max_tick_num));
let new_tick_num = self.tick_num();
// Look at all the intermediate ticks. We will have one of the
// following cases:
//
// 1. In this update call, the tick number did not change, so
// `prev_tick_num == new_tick_num`.
// 2. We crossed one tick, e.g. prev_tick_num is 7 and new_tick_num is
// 8.
// 3. We crossed more than one tick. This should happen only on lag
// spikes, be it local or in the network.
let mut crossed_tick_nums: Vec<comn::TickNum> = (prev_tick_num.0 + 1..=new_tick_num.0)
.map(comn::TickNum)
.collect();
if crossed_tick_nums.len() > MAX_TICKS_PER_UPDATE {
// It's possible that we have a large jump in ticks, e.g. due to a
// lag spike, or because we are running in a background tab. In this
// case, we don't want to overload ourselves by sending many input
// packets and performing prediction over many ticks. Instead, we
// just jump directly to the last couple of ticks.
info!("Crossed {} ticks, will skip", crossed_tick_nums.len());
// TODO: In order to nicely reinitialize prediction, we should take
// those crossed ticks for which we actually received a server
// state...
crossed_tick_nums.drain(0..crossed_tick_nums.len() - MAX_TICKS_PER_UPDATE);
assert!(crossed_tick_nums.len() == MAX_TICKS_PER_UPDATE);
}
// Iterate over all the ticks that we have crossed, also including
// those for which we did not receive anything from the server.
let mut events = Vec::new();
for tick_num in crossed_tick_nums.iter() {
coarse_prof::profile!("tick");
// For debugging, keep track of how many ticks we do not
// receive server data on time.
if self.received_states.get(tick_num).is_some() {
self.stats.skip_loss.record_received(tick_num.0 as usize);
}
// Start server events of crossed ticks.
if let Some(tick_events) = self.received_events.get(tick_num) {
events.extend(tick_events.clone().into_iter());
self.received_events.remove(tick_num);
}
// Send inputs for server ticks we cross.
self.last_inputs.push_back((*tick_num, input.clone()));
while self.last_inputs.len() > comn::MAX_INPUTS_PER_MESSAGE {
self.last_inputs.pop_front();
}
self.send(comn::ClientMessage::Input(
self.last_inputs.iter().cloned().collect(),
));
// Predict effects of our own input locally.
if let Some(prediction) = self.prediction.as_mut() {
coarse_prof::profile!("predict");
prediction.record_tick_input(
*tick_num,
input.clone(),
self.received_states.get(tick_num),
);
}
}
coarse_prof::profile!("cleanup");
if self.next_tick_num <= Some(self.tick_num()) {
// We have reached the tick that we were interpolating into, so
// we'll need to look for the next interpolation target.
self.next_tick_num = None;
}
// Do we have a tick to interpolate into ready?
if self.next_tick_num.is_none() {
let min_ready_num = self.received_states.keys().find(|tick_num| {
**tick_num > self.tick_num() && tick_num.0 - self.tick_num().0 <= 3
});
if let Some(min_ready_num) = min_ready_num {
self.next_tick_num = Some(*min_ready_num);
}
}
// Remove events for older ticks, we will no longer need them. Note,
// however, that the same cannot be said about the received states,
// since we may still need them as the basis for delta decoding.
// Received states are only pruned when we receive new states.
{
let remove_tick_nums: Vec<comn::TickNum> = self
.received_events
.keys()
.copied()
.filter(|tick_num| *tick_num < self.tick_num())
.collect();
for tick_num in remove_tick_nums {
self.received_events.remove(&tick_num);
}
}
// Keep some statistics for debugging...
if let Some(recv_game_time) = recv_game_time {
self.stats
.time_lag_ms
.record((recv_game_time - self.interp_game_time) * 1000.0);
} else {
// We cannot estimate the server time, so we probably disconnected
// or just connected.
self.stats.time_lag_ms = stats::Var::default();
}
self.stats
.tick_interp
.record(self.next_tick_num.map_or(0.0, |next_tick_num| {
(next_tick_num.0 - self.tick_num().0) as f32
}));
self.stats
.time_warp_factor
.record(self.next_time_warp_factor);
self.stats.send_rate = self.webrtc_client.send_rate();
self.stats.recv_rate = self.webrtc_client.recv_rate();
self.stats.recv_delay_std_dev = self.recv | stats | identifier_name |
|
runner.rs | ) -> &Stats {
&self.stats
}
pub fn ping(&self) -> &PingEstimation {
&self.ping
}
pub fn interp_game_time(&self) -> comn::GameTime {
self.interp_game_time
}
fn target_time_lag(&self) -> comn::GameTime {
self.settings.tick_period() * 1.5
}
fn tick_num(&self) -> comn::TickNum {
comn::TickNum((self.interp_game_time / self.settings.tick_period()) as u32)
}
pub fn update(&mut self, now: Instant, dt: Duration, input: &comn::Input) -> Vec<comn::Event> {
assert!(self.is_good());
{
coarse_prof::profile!("webrtc");
self.webrtc_client.set_now((Instant::now(), now));
while let Some((recv_time, message)) = self.webrtc_client.take_message() {
self.handle_message(recv_time, message);
}
}
if let Some(sequence_num) = self.ping.next_ping_sequence_num(now) {
self.send(comn::ClientMessage::Ping(sequence_num));
}
// Determine new local game time, making sure to stay behind the receive
// stream by our desired lag time. We do this so that we have ticks
// between which we can interpolate.
//
// If we are off too far from our lag target, slow down or speed up
// playback time.
let time_since_start = now.duration_since(self.start_time).as_secs_f32();
let recv_game_time = self.recv_tick_time.estimate(time_since_start);
let new_interp_game_time = if let Some(recv_game_time) = recv_game_time {
let current_time_lag = recv_game_time - (self.interp_game_time + dt.as_secs_f32());
let time_lag_deviation = self.target_time_lag() - current_time_lag;
self.stats
.time_lag_deviation_ms
.record(time_lag_deviation * 1000.0);
if time_lag_deviation.abs() < MAX_TIME_LAG_DEVIATION {
/*let k = 0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.05).exp());
if time_lag_deviation > 0.0 {
1.0 / k
} else {
k
}*/
//0.5 * ((-time_lag_deviation).tanh() + 2.0)
self.next_time_warp_factor =
0.5 + (2.0 - 0.5) / (1.0 + 2.0 * (time_lag_deviation / 0.005).exp());
self.interp_game_time + self.next_time_warp_factor * dt.as_secs_f32()
} else {
// Our playback time is too far off, just jump directly to the
// target time.
let target_time = recv_game_time - self.target_time_lag();
info!(
"Time is off by {}, jumping to {}",
time_lag_deviation, target_time
);
target_time
}
} else {
// We have no knowledge of the tick receive time, probably didn't
// receive the first tick packet yet.
self.interp_game_time
};
// Don't let time run further than the ticks that we have received.
// This is here so that we stop local time if the server drops or
// starts lagging heavily.
let max_tick_num = self
.received_states
.keys()
.rev()
.next()
.copied()
.unwrap_or(comn::TickNum(0))
.max(self.tick_num())
.max(self.next_tick_num.unwrap_or(comn::TickNum(0)));
// Advance our playback time.
let prev_tick_num = self.tick_num();
self.interp_game_time =
new_interp_game_time.min(self.settings.tick_game_time(max_tick_num));
let new_tick_num = self.tick_num();
// Look at all the intermediate ticks. We will have one of the
// following cases:
//
// 1. In this update call, the tick number did not change, so
// `prev_tick_num == new_tick_num`.
// 2. We crossed one tick, e.g. prev_tick_num is 7 and new_tick_num is
// 8.
// 3. We crossed more than one tick. This should happen only on lag
// spikes, be it local or in the network.
let mut crossed_tick_nums: Vec<comn::TickNum> = (prev_tick_num.0 + 1..=new_tick_num.0)
.map(comn::TickNum)
.collect();
if crossed_tick_nums.len() > MAX_TICKS_PER_UPDATE {
// It's possible that we have a large jump in ticks, e.g. due to a
// lag spike, or because we are running in a background tab. In this
// case, we don't want to overload ourselves by sending many input
// packets and performing prediction over many ticks. Instead, we
// just jump directly to the last couple of ticks.
info!("Crossed {} ticks, will skip", crossed_tick_nums.len());
// TODO: In order to nicely reinitialize prediction, we should take
// those crossed ticks for which we actually received a server
// state...
crossed_tick_nums.drain(0..crossed_tick_nums.len() - MAX_TICKS_PER_UPDATE);
assert!(crossed_tick_nums.len() == MAX_TICKS_PER_UPDATE);
}
// Iterate over all the ticks that we have crossed, also including
// those for which we did not receive anything from the server.
let mut events = Vec::new();
for tick_num in crossed_tick_nums.iter() {
coarse_prof::profile!("tick");
// For debugging, keep track of how many ticks we do not
// receive server data on time.
if self.received_states.get(tick_num).is_some() {
self.stats.skip_loss.record_received(tick_num.0 as usize);
}
// Start server events of crossed ticks.
if let Some(tick_events) = self.received_events.get(tick_num) {
events.extend(tick_events.clone().into_iter());
self.received_events.remove(tick_num);
}
// Send inputs for server ticks we cross.
self.last_inputs.push_back((*tick_num, input.clone()));
while self.last_inputs.len() > comn::MAX_INPUTS_PER_MESSAGE {
self.last_inputs.pop_front();
}
self.send(comn::ClientMessage::Input(
self.last_inputs.iter().cloned().collect(),
));
// Predict effects of our own input locally.
if let Some(prediction) = self.prediction.as_mut() {
coarse_prof::profile!("predict");
prediction.record_tick_input(
*tick_num,
input.clone(),
self.received_states.get(tick_num),
);
}
}
coarse_prof::profile!("cleanup");
if self.next_tick_num <= Some(self.tick_num()) {
// We have reached the tick that we were interpolating into, so
// we'll need to look for the next interpolation target.
self.next_tick_num = None;
}
// Do we have a tick to interpolate into ready?
if self.next_tick_num.is_none() {
let min_ready_num = self.received_states.keys().find(|tick_num| {
**tick_num > self.tick_num() && tick_num.0 - self.tick_num().0 <= 3
});
if let Some(min_ready_num) = min_ready_num |
}
// Remove events for older ticks, we will no longer need them. Note,
// however, that the same cannot be said about the received states,
// since we may still need them as the basis for delta decoding.
// Received states are only pruned when we receive new states.
{
let remove_tick_nums: Vec<comn::TickNum> = self
.received_events
.keys()
.copied()
.filter(|tick_num| *tick_num < self.tick_num())
.collect();
for tick_num in remove_tick_nums {
self.received_events.remove(&tick_num);
}
}
// Keep some statistics for debugging...
if let Some(recv_game_time) = recv_game_time {
self.stats
.time_lag_ms
.record((recv_game_time - self.interp_game_time) * 1000.0);
} else {
// We cannot estimate the server time, so we probably disconnected
// or just connected.
self.stats.time_lag_ms = stats::Var::default();
}
self.stats
.tick_interp
.record(self.next_tick_num.map_or(0.0, |next_tick_num| {
(next_tick_num.0 - self.tick_num().0) as f32
}));
self.stats
.time_warp_factor
.record(self.next_time_warp_factor);
self.stats.send_rate = self.webrtc_client.send_rate();
self.stats.recv_rate = self.webrtc_client.recv_rate();
self.stats.recv_delay_std_dev = self.recv_tick | {
self.next_tick_num = Some(*min_ready_num);
} | conditional_block |
main.js | grape', 'watermelon', 'raspberry', 'blackberry', 'strawberry', 'banana', 'peach', 'pear', 'cherry', 'grapefruit', 'kiwi', 'mango', 'lemon', 'avocado', 'cantaloupe', 'apricot', 'papaya', 'fig', 'tangerine', 'pomegranate', 'coconut', 'cranberry', 'passionfruit', 'lychee', 'loquat', 'pitaya', 'jujube', 'boysenberry', 'tangelo', 'guava'
],
gotChars: [
'daenerystargaryen', 'jonsnow', 'aryastark', 'sansastark', 'themountain', 'cerseilannister', 'tyrionlannister', 'khaldrogo', 'joffreybaratheon', 'margaerytyrell', 'melisandre', 'thehound', 'ramsaybolton', 'eddardstark', 'hodor', 'brienne', 'lordbaelish', 'branstark', 'robbstark', 'daarionaharis', 'bronn', 'lordvarys', 'theongreyjoy', 'stannisbaratheon', 'jaimelannister', 'jorahmormont', 'tormundgiantsbane', 'highsparrow', 'rickonstark', 'davosseaworth'
],
superheros: [
'spiderman', 'hulk', 'thor', 'ironman', 'lukecage', 'blackwidow', 'daredevil', 'captainamerica', 'wolverine', 'doctorstrange', 'deadpool', 'captainamerica', 'batman', 'superman', 'wonderwoman', 'aquaman', 'flash', 'greenarrow', 'ironfist', 'captainatom', 'antman', 'greenlantern'
],
animals: [
'cat', 'dog', 'giraffe', 'monkey', 'gorilla', 'elephant', 'lion', 'tiger', 'bear', 'dolphin', 'whale', 'shark', 'shrimp', 'mouse', 'camel', 'llama', 'horse', 'donkey', 'chicken', 'moose', 'deer', 'wolf', 'coyote', 'rabbit', 'owl', 'hawk', 'squid', 'octopus', 'eel', 'snake', 'gecko', 'pig', 'ostrich', 'cow', 'bull', 'goat', 'sheep', 'turtle', 'tortoise', 'rooster', 'bat', 'rhinoceros', 'alligator', 'crocodile', 'hamster', 'hyena', 'hippopotamus', 'jaguar', 'jellyfish', 'manatee', 'opossum', 'otter', 'parrot', 'porcupine', 'raccoon', 'seahorse', 'sloth', 'skunk', 'spider', 'squirrel', 'walrus', 'wolverine', 'zebra', 'gazelle', 'frog', 'eagle', 'duck', 'cheetah', 'chinchilla', 'buffalo', 'beaver', 'armadillo', 'alpaca', 'aardvark'
]
},
// new game method - called onclick
newGame: function (category, difficulty) {
this.resetGame(difficulty);
if (this.gameCounter === 0) {
this.drawKeyboard();
} else if (this.gameCounter > 0) {
$('.letter-button').css({
'background-color': '#454545'
})
$('#keyboard').toggle('slow');
}
var category = this.categories[category];
var rngWord = category[Math.floor(Math.random() * (category.length))];
var counter = 0;
while (this.usedWords.indexOf(rngWord) >= 0 && counter < category.length) {
rngWord = category[Math.floor(Math.random() * (category.length))];
counter++;
}
this.newWord = rngWord;
this.usedWords.push(this.newWord);
// console.log(this.usedWords);
this.generateSecretWord(this.newWord);
this.updateSecretWordUI(this.secretWord);
this.updateCategoryUI(category);
},
// fills secretWord arr with '_' using newWord
generateSecretWord: function (newWord) {
for (var letter of newWord) {
this.secretWord.push('_');
}
},
// resets guessedLetters and guessesLeft
resetGame: function (difficulty) {
// reset guessedLetters to empty arr
this.guessedLetters = [];
// reset guessesLeft to 10
this.guessesLeft = difficulty;
// reset secretWrod to empty arr
this.secretWord = [];
// reset innerHTML of wordElem
wordElem.innerHTML = '';
winLoseMsg.innerHTML = '';
// reset guesses innerHTML
this.showGuessInfo();
this.hideButtons();
},
guessLetter: function (guess, secret, word) {
// if a word has been chosen and user has guesses left
var tempSecretWord = secret;
if (this.newWord !== '' && this.guessesLeft > 0) {
var checkIndex = 0;
// this will set checkIndex to -1 if guess is not in word
checkIndex = word.indexOf(guess);
// if guess is not in word AND
// if guess is not in guessedLetters, decrement guessesLeft
// push guess to guessedLetters AND update UI
if (checkIndex === -1 && this.guessedLetters.indexOf(guess) === -1) |
// convert secret arr to string
secret = secret.join('');
while (checkIndex >= 0) {
// update secret arr
secret = this.updateSecretWord(checkIndex, guess, secret);
// move to next index of word, reassign checkIndex
checkIndex = word.indexOf(guess, checkIndex + 1);
}
// empty secretWord arr to push updated arr (secret) into it afterwards
this.secretWord = [];
var secretArr = secret.split('');
for (var char of secret) {
this.secretWord.push(char);
}
// updates secret word in ui
this.updateSecretWordUI(this.secretWord);
}
// check if they won
this.checkIfWon();
//TODO
this.checkIfLost();
return true;
},
// if a correct letter is guessed, updates the secret word (changes corresponding dashes to letters)
updateSecretWord: function (pos, char, originWord) {
return originWord.substring(0, pos) + char + originWord.substring(pos + 1, originWord.length);
},
updateSecretWordUI: function (secretWord) {
wordElem.innerHTML = '';
for (var char of secretWord) {
wordElem.innerHTML += char + ' ';
}
},
updateCategoryUI: function (category) {
if (category === this.categories.fruits) {
categoryUI.innerHTML = "Fruits";
} else if (category === this.categories.gotChars) {
categoryUI.innerHTML = "Game of Thrones Characters";
} else if (category === this.categories.superheros) {
categoryUI.innerHTML = "Superheros";
} else if (category === this.categories.animals) {
categoryUI.innerHTML = "Animals";
}
},
// compare secretWord to word to check if they won
checkIfWon: function () {
var secretString = this.secretWord.join('');
if (secretString === this.newWord && this.newWord !== '') {
winLoseMsg.innerHTML = 'YOU WON!';
this.incrementWins();
this.hideGuessInfo();
this.showButtons();
this.gameCounter++;
$('#keyboard').toggle('slow');
}
},
incrementWins: function () {
this.consecutiveWins++;
$('#consecutive-wins').html('Consecutive Wins: ' + this.consecutiveWins);
},
checkIfLost: function () {
// DO STUFF HERE
if (this.guessesLeft === 0) {
winLoseMsg.innerHTML = 'YOU LOSE. GAME OVER <br>';
winLoseMsg.innerHTML += 'The Word: ' + this.newWord;
this.showButtons();
// reset usedWords arr
this.usedWords = [];
this.consecutiveWins = 0;
$('#consecutive-wins').html('Consecutive Wins: ' + this.consecutiveWins);
this.gameCounter++;
$('#keyboard').toggle('slow');
}
},
hideButtons: function () {
// hides newGame buttons after 1 is clicked
$('.newGame-btn').css({
'display': 'none'
});
},
showButtons: function () {
// hides newGame buttons after 1 is clicked
$('.newGame-btn').css({
'display': 'block'
});
},
hideGuessInfo: function () {
guessesLeftElem.innerHTML = '';
// reset guessedLetters innerHTML
guessedLettersElem.innerHTML = '';
},
showGuessInfo: function () {
this.updateGuessesUI();
// reset guessedLetters innerHTML | {
this.guessesLeft--;
this.guessedLetters.push(guess);
this.updateGuessesUI();
return false;
} | conditional_block |
main.js | grape', 'watermelon', 'raspberry', 'blackberry', 'strawberry', 'banana', 'peach', 'pear', 'cherry', 'grapefruit', 'kiwi', 'mango', 'lemon', 'avocado', 'cantaloupe', 'apricot', 'papaya', 'fig', 'tangerine', 'pomegranate', 'coconut', 'cranberry', 'passionfruit', 'lychee', 'loquat', 'pitaya', 'jujube', 'boysenberry', 'tangelo', 'guava'
],
gotChars: [
'daenerystargaryen', 'jonsnow', 'aryastark', 'sansastark', 'themountain', 'cerseilannister', 'tyrionlannister', 'khaldrogo', 'joffreybaratheon', 'margaerytyrell', 'melisandre', 'thehound', 'ramsaybolton', 'eddardstark', 'hodor', 'brienne', 'lordbaelish', 'branstark', 'robbstark', 'daarionaharis', 'bronn', 'lordvarys', 'theongreyjoy', 'stannisbaratheon', 'jaimelannister', 'jorahmormont', 'tormundgiantsbane', 'highsparrow', 'rickonstark', 'davosseaworth'
],
superheros: [
'spiderman', 'hulk', 'thor', 'ironman', 'lukecage', 'blackwidow', 'daredevil', 'captainamerica', 'wolverine', 'doctorstrange', 'deadpool', 'captainamerica', 'batman', 'superman', 'wonderwoman', 'aquaman', 'flash', 'greenarrow', 'ironfist', 'captainatom', 'antman', 'greenlantern'
],
animals: [
'cat', 'dog', 'giraffe', 'monkey', 'gorilla', 'elephant', 'lion', 'tiger', 'bear', 'dolphin', 'whale', 'shark', 'shrimp', 'mouse', 'camel', 'llama', 'horse', 'donkey', 'chicken', 'moose', 'deer', 'wolf', 'coyote', 'rabbit', 'owl', 'hawk', 'squid', 'octopus', 'eel', 'snake', 'gecko', 'pig', 'ostrich', 'cow', 'bull', 'goat', 'sheep', 'turtle', 'tortoise', 'rooster', 'bat', 'rhinoceros', 'alligator', 'crocodile', 'hamster', 'hyena', 'hippopotamus', 'jaguar', 'jellyfish', 'manatee', 'opossum', 'otter', 'parrot', 'porcupine', 'raccoon', 'seahorse', 'sloth', 'skunk', 'spider', 'squirrel', 'walrus', 'wolverine', 'zebra', 'gazelle', 'frog', 'eagle', 'duck', 'cheetah', 'chinchilla', 'buffalo', 'beaver', 'armadillo', 'alpaca', 'aardvark'
]
},
// new game method - called onclick
newGame: function (category, difficulty) {
this.resetGame(difficulty);
if (this.gameCounter === 0) {
this.drawKeyboard();
} else if (this.gameCounter > 0) {
$('.letter-button').css({
'background-color': '#454545'
})
$('#keyboard').toggle('slow');
}
var category = this.categories[category];
var rngWord = category[Math.floor(Math.random() * (category.length))];
var counter = 0;
while (this.usedWords.indexOf(rngWord) >= 0 && counter < category.length) {
rngWord = category[Math.floor(Math.random() * (category.length))];
counter++;
}
this.newWord = rngWord;
this.usedWords.push(this.newWord);
// console.log(this.usedWords);
this.generateSecretWord(this.newWord);
this.updateSecretWordUI(this.secretWord);
this.updateCategoryUI(category);
},
// fills secretWord arr with '_' using newWord
generateSecretWord: function (newWord) {
for (var letter of newWord) {
this.secretWord.push('_');
}
},
// resets guessedLetters and guessesLeft
resetGame: function (difficulty) {
// reset guessedLetters to empty arr
this.guessedLetters = [];
// reset guessesLeft to 10
this.guessesLeft = difficulty;
// reset secretWrod to empty arr
this.secretWord = [];
// reset innerHTML of wordElem
wordElem.innerHTML = '';
winLoseMsg.innerHTML = '';
// reset guesses innerHTML
this.showGuessInfo();
this.hideButtons();
},
guessLetter: function (guess, secret, word) {
// if a word has been chosen and user has guesses left
var tempSecretWord = secret;
if (this.newWord !== '' && this.guessesLeft > 0) {
var checkIndex = 0;
// this will set checkIndex to -1 if guess is not in word
checkIndex = word.indexOf(guess);
// if guess is not in word AND
// if guess is not in guessedLetters, decrement guessesLeft
// push guess to guessedLetters AND update UI
if (checkIndex === -1 && this.guessedLetters.indexOf(guess) === -1) {
this.guessesLeft--;
this.guessedLetters.push(guess);
this.updateGuessesUI();
return false;
}
// convert secret arr to string
secret = secret.join('');
while (checkIndex >= 0) {
// update secret arr
secret = this.updateSecretWord(checkIndex, guess, secret);
// move to next index of word, reassign checkIndex
checkIndex = word.indexOf(guess, checkIndex + 1);
}
// empty secretWord arr to push updated arr (secret) into it afterwards
this.secretWord = [];
var secretArr = secret.split('');
for (var char of secret) {
this.secretWord.push(char);
}
// updates secret word in ui
this.updateSecretWordUI(this.secretWord);
}
// check if they won
this.checkIfWon();
//TODO
this.checkIfLost();
return true;
},
// if a correct letter is guessed, updates the secret word (changes corresponding dashes to letters)
updateSecretWord: function (pos, char, originWord) {
return originWord.substring(0, pos) + char + originWord.substring(pos + 1, originWord.length);
},
updateSecretWordUI: function (secretWord) {
wordElem.innerHTML = '';
for (var char of secretWord) {
wordElem.innerHTML += char + ' ';
}
},
updateCategoryUI: function (category) {
if (category === this.categories.fruits) {
categoryUI.innerHTML = "Fruits";
} else if (category === this.categories.gotChars) {
categoryUI.innerHTML = "Game of Thrones Characters";
} else if (category === this.categories.superheros) {
categoryUI.innerHTML = "Superheros";
} else if (category === this.categories.animals) {
categoryUI.innerHTML = "Animals";
}
},
// compare secretWord to word to check if they won
checkIfWon: function () {
var secretString = this.secretWord.join('');
if (secretString === this.newWord && this.newWord !== '') {
winLoseMsg.innerHTML = 'YOU WON!';
this.incrementWins();
this.hideGuessInfo();
this.showButtons();
this.gameCounter++;
$('#keyboard').toggle('slow');
}
},
incrementWins: function () {
this.consecutiveWins++;
$('#consecutive-wins').html('Consecutive Wins: ' + this.consecutiveWins);
},
checkIfLost: function () {
// DO STUFF HERE
if (this.guessesLeft === 0) {
winLoseMsg.innerHTML = 'YOU LOSE. GAME OVER <br>';
winLoseMsg.innerHTML += 'The Word: ' + this.newWord;
this.showButtons();
// reset usedWords arr
this.usedWords = [];
this.consecutiveWins = 0;
$('#consecutive-wins').html('Consecutive Wins: ' + this.consecutiveWins);
this.gameCounter++;
$('#keyboard').toggle('slow');
}
},
hideButtons: function () {
// hides newGame buttons after 1 is clicked
$('.newGame-btn').css({
'display': 'none'
});
},
showButtons: function () {
// hides newGame buttons after 1 is clicked
$('.newGame-btn').css({
'display': 'block' | // reset guessedLetters innerHTML
guessedLettersElem.innerHTML = '';
},
showGuessInfo: function () {
this.updateGuessesUI();
// reset guessedLetters innerHTML
| });
},
hideGuessInfo: function () {
guessesLeftElem.innerHTML = ''; | random_line_split |
Genetic_algorithm.py | for i in range(0,25):
for j in range(10,20):
random_selection.append(array[j])
for i in range(0,20):
for j in range(20,30):
random_selection.append(array[j])
for i in range(0,7):
for j in range(30,40):
random_selection.append(array[j])
for i in range(0,6):
for j in range(40,50):
random_selection.append(array[j])
for i in range(0,5):
for j in range(50,60):
random_selection.append(array[j])
for i in range(0,4):
for j in range(60,70):
random_selection.append(array[j])
for i in range(70,80):
random_selection.append(array[i])
for i in range(80,90):
random_selection.append(array[i])
for i in range(90,100):
random_selection.append(array[i])
selection_array = []
#교차 (전체 개체수 - 엘리트 개체수 - 뮤테이션개수) 개를 만들기위해선 그 두배를 선택해야한다
#가상 룰렛을 만든곳에서 랜덤하게 뽑아서 배열을 생성한다
for i in range(0,num*2):
selection_array.append(random_selection[random.randrange(0,1000)])
return selection_array
#교차는 총 (전체 개체수 - 엘리트 개체수) 의 자식을 생성한다
def Crossover(array,num,mutProb):
#1,2,3 이 파라미터니까 1~2사이 혹은 2~3사이를 한점으로 정해서 single potin crossover
crossover_array = []
tmp_x = 0
tmp_y = 0
tmp_z = 0
n = num*2 -1
for i in range(0,n):
if(random.uniform(1,2) >= 1.5):
# 1 과 2중에 랜덤으로 하나를 선택해서 1일경우
# 그점을 중심으로 교차시킨다
tmp_x = array[i][1]
tmp_y = array[i+1][2]
tmp_z = array[i+1][3]
else:
tmp_x = array[i][1]
tmp_y = array[i][2]
tmp_z = array[i+1][3]
tmp = [0,tmp_x,tmp_y,tmp_z]
# 랜덤값이 확률값보다 작을때 뮤테이션
if(random.uniform(0,100) < mutProb*100):
tmp = Mutation(tmp)
tmp = Erroreval_simple(tmp)
crossover_array.append(tmp)
i +=1
return crossover_array
#돌연 변이 생성
def Mutation(x):
#각 파라미터를 50%확률로 랜덤값을 적용하게 된다
if(random.uniform(1,2) >= 1.5):
x[1] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-1000.0,1000.0)
return x
##### train 파일
#train 파일을 열어서 값단위로 읽어 배열생성
fd1 = open('salmon_train.txt','r')
salmon_t = fd1.readlines()
fd1.close()
salmon = []
for line in salmon_t:
a = line.split()
salmon.append([float(a[0]),float(a[1])])
fd2 = open('seabass_train.txt','r')
seabass_t = fd2.readlines()
fd2.close( | se()
salmon = []
seabass = []
#읽어들인 txt파일을 줄단위로 읽어들여서 list에 저장한다
for line in salmon_t:
a=line.split()
salmon.append([float(a[0]),float(a[1])])
for line in seabass_t:
a=line.split()
seabass.append([float(a[0]),float(a[1])])
def runExp(popSize, elitNum, mutProb):
print 'training...' #학습 서브루틴
trResFn = 'train_log_%d_%d_%.2f' % (popSize,eliteNum,mutProb)
#####################-학습 시작-#############################
#log 를 입력하기 위한 파일 오픈
f_name = '%s%d%s%d%s%.2f%s' %('train_log_',popSize,'_',elitNum,'_',round(mutProb,2),'.txt')
fd_log = open(f_name,'w')
genetic_array = []
##랜덤 염색체 부여
for i in range(0,popSize):
genetic_array.append([0,random.uniform(-10.0,10.0),random.uniform(-10.0,10.0),random.uniform(-1000,1000)])
#1세대의 적합도 계산
genetic_array = Erroreval(genetic_array)
#랜덤 염색체들을 정렬한다 -> 엘리트들을 고르기위해
genetic_array.sort()
#가장 좋은 염색체의 오류율을 저장해둔다
low_e = genetic_array[0][0]
count = 1
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
#학습은 오류율이 9%이하로 떨어지면 중단한다
while(low_e>8):
count += 1
#선택을 통해서 가상의 룰렛을 만들어 뽑은 염색체를 교차시킨다
child_array = []
child_array = Selection(genetic_array,popSize)
child_array = Crossover(child_array,popSize,mutProb)
#좋은 부모개체 elitNum개는 다음세대에 그대로 물려준다
elitism_array = []
for i in range(0,elitNum):
elitism_array.append(genetic_array[i])
#다음 세대 염색체를 리스트로 합친다
genetic_array = []
genetic_array.extend(child_array)
genetic_array.extend(elitism_array)
genetic_array.sort()
low_e = genetic_array[0][0]
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
fd_log.close()
print 'result file:' ,trResFn
print 'testing...' #테스트 서브루틴
#####################-테스트 시작-#####################################
# train을 통해 학습한 prameter값을 세팅
# 훈련된 염색체 중 가장 상위 염색체의 파라미터 값으로 설정
tmp_x=genetic_array[0][1]
tmp_y=genetic_array[0][2]
tmp_z=genetic_array[0][3]
parameter = [tmp_x,tmp_y,tmp_z]
x_coefficient = -(parameter[0]/parameter[1])
x_constant =( | )
seabass = []
for line in seabass_t:
a = line.split()
seabass.append([float(a[0]),float(a[1])])
##### text 파일
fd1 = open('salmon_test.txt','r')
salmon_t = fd1.readlines()
fd1.close()
fd2 = open('seabass_test.txt','r')
seabass_t = fd2.readlines()
fd2.clo | identifier_body |
Genetic_algorithm.py |
for i in range(0,25):
for j in range(10,20):
random_selection.append(array[j])
for i in range(0,20):
for j in range(20,30):
random_selection.append(array[j])
for i in range(0,7):
for j in range(30,40):
random_selection.append(array[j])
for i in range(0,6):
for j in range(40,50):
random_selection.append(array[j])
for i in range(0,5):
for j in range(50,60):
random_selection.append(array[j])
for i in range(0,4):
for j in range(60,70):
random_selection.append(array[j])
for i in range(70,80):
random_selection.append(array[i])
for i in range(80,90):
random_selection.append(array[i])
for i in range(90,100):
random_selection.append(array[i])
selection_array = []
#교차 (전체 개체수 - 엘리트 개체수 - 뮤테이션개수) 개를 만들기위해선 그 두배를 선택해야한다
#가상 룰렛을 만든곳에서 랜덤하게 뽑아서 배열을 생성한다
for i in range(0,num*2):
selection_array.append(random_selection[random.randrange(0,1000)])
return selection_array
#교차는 총 (전체 개체수 - 엘리트 개체수) 의 자식을 생성한다
def Crossover(array,num,mutProb):
#1,2,3 이 파라미터니까 1~2사이 혹은 2~3사이를 한점으로 정해서 single potin crossover
crossover_array = []
tmp_x = 0
tmp_y = 0
tmp_z = 0
n = num*2 -1
for i in range(0,n):
if(random.uniform(1,2) >= 1.5):
# 1 과 2중에 랜덤으로 하나를 선택해서 1일경우
# 그점을 중심으로 교차시킨다
tmp_x = array[i][1]
tmp_y = array[i+1][2]
tmp_z = array[i+1][3]
else:
tmp_x = array[i][1]
tmp_y = array[i][2]
tmp_z = array[i+1][3]
tmp = [0,tmp_x,tmp_y,tmp_z]
# 랜덤값이 확률값보다 작을때 뮤테이션
if(random.uniform(0,100) < mutProb*100):
tmp = Mutation(tmp)
tmp = Erroreval_simple(tmp)
crossover_array.append(tmp)
i +=1
return crossover_array
#돌연 변이 생성
def Mutation(x):
#각 파라미터를 50%확률로 랜덤값을 적용하게 된다
if(random.uniform(1,2) >= 1.5):
x[1] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-1000.0,1000.0)
return x
##### train 파일
#train 파일을 열어서 값단위로 읽어 배열생성
fd1 = open('salmon_train.txt','r')
salmon_t = fd1.readlines()
fd1.close()
salmon = []
for line in salmon_t:
a = line.split()
salmon.append([float(a[0]),float(a[1])])
fd2 = open('seabass_train.txt','r')
seabass_t = fd2.readlines()
fd2.close()
seabass = []
for line in seabass_t:
a = line.split()
seabass.append([float(a[0]),float(a[1])])
##### text 파일
fd1 = open('salmon_test.txt','r')
salmon_t = fd1.readlines()
fd1.close()
fd2 = open('seabass_test.txt','r')
seabass_t = fd2.readlines()
fd2.close()
salmon = []
seabass = []
#읽어들인 txt파일을 줄단위로 읽어들여서 list에 저장한다
for line in salmon_t:
a=line.split()
salmon.append([float(a[0]),float(a[1])])
for line in seabass_t:
a=line.split()
seabass.append([float(a[0]),float(a[1])])
def runExp(popSize, elitNum, mutProb):
print 'training...' #학습 서브루틴
trResFn = 'train_log_%d_%d_%.2f' % (popSize,eliteNum,mutProb)
#####################-학습 시작-#############################
#log 를 입력하기 위한 파일 오픈
f_name = '%s%d%s%d%s%.2f%s' %('train_log_',popSize,'_',elitNum,'_',round(mutProb,2),'.txt')
fd_log = open(f_name,'w')
genetic_array = []
##랜덤 염색체 부여
for i in range(0,popSize):
genetic_array.append([0,random.uniform(-10.0,10.0),random.uniform(-10.0,10.0),random.uniform(-1000,1000)])
#1세대의 적합도 계산
genetic_array = Erroreval(genetic_array)
#랜덤 염색체들을 정렬한다 -> 엘리트들을 고르기위해
genetic_array.sort()
#가장 좋은 염색체의 오류율을 저장해둔다
low_e = genetic_array[0][0]
count = 1
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
#학습은 오류율이 9%이하로 떨어지면 중단한다
while(low_e>8):
count += 1
#선택을 통해서 가상의 룰렛을 만들어 뽑은 염색체를 교차시킨다
child_array = []
child_array = Selection(genetic_array,popSize)
child_array = Crossover(child_array,popSize,mutProb)
#좋은 부모개체 elitNum개는 다음세대에 그대로 물려준다
elitism_array = []
for i in range(0,elitNum):
elitism_array.append(genetic_array[i])
| genetic_array = []
genetic_array.extend(child_array)
genetic_array.extend(elitism_array)
genetic_array.sort()
low_e = genetic_array[0][0]
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
fd_log.close()
print 'result file:' ,trResFn
print 'testing...' #테스트 서브루틴
#####################-테스트 시작-#####################################
# train을 통해 학습한 prameter값을 세팅
# 훈련된 염색체 중 가장 상위 염색체의 파라미터 값으로 설정
tmp_x=genetic_array[0][1]
tmp_y=genetic_array[0][2]
tmp_z=genetic_array[0][3]
parameter = [tmp_x,tmp_y,tmp_z]
x_coefficient = -(parameter[0]/parameter[1])
x_constant =(parameter[2 | #다음 세대 염색체를 리스트로 합친다 | random_line_split |
Genetic_algorithm.py | for i in range(0,25):
for j in range(10,20):
random_selection.append(array[j])
for i in range(0,20):
for j in range(20,30):
random_selection.append(array[j])
for i in range(0,7):
for j in range(30,40):
random_selection.append(array[j])
for i in range(0,6):
for j in range(40,50):
random_selection.append(array[j])
for i in range(0,5):
for j in range(50,60):
random_selection.append(array[j])
for i in range(0,4):
for j in range(60,70):
random_selection.append(array[j])
for i in range(70,80):
random_selection.append(array[i])
for i in range(80,90):
random_selection.append(array[i])
for i in range(90,100):
random_selection.append(array[i])
selection_array = []
#교차 (전체 개체수 - 엘리트 개체수 - 뮤테이션개수) 개를 만들기위해선 그 두배를 선택해야한다
#가상 룰렛을 만든곳에서 랜덤하게 뽑아서 배열을 생성한다
for i in range(0,num*2):
selection_array.append(random_selection[random.randrange(0,1000)])
return selection_array
#교차는 총 (전체 개체수 - 엘리트 개체수) 의 자식을 생성한다
def Crossover(array,num,mutProb):
#1,2,3 이 파라미터니까 1~2사이 혹은 2~3사이를 한점으로 정해서 single potin crossover
crossover_array = []
tmp_x = 0
tmp_y = 0
tmp_z = 0
n = num*2 -1
for i in range(0,n):
if(random.uniform(1,2) >= 1.5):
# 1 과 2중에 랜덤으로 하나를 선택해서 1일경우
# 그점을 중심으로 교차시킨다
tmp_x = array[i][1]
tmp_y = array[i+1][2]
tmp_z | i+1][3]
else:
tmp_x = array[i][1]
tmp_y = array[i][2]
tmp_z = array[i+1][3]
tmp = [0,tmp_x,tmp_y,tmp_z]
# 랜덤값이 확률값보다 작을때 뮤테이션
if(random.uniform(0,100) < mutProb*100):
tmp = Mutation(tmp)
tmp = Erroreval_simple(tmp)
crossover_array.append(tmp)
i +=1
return crossover_array
#돌연 변이 생성
def Mutation(x):
#각 파라미터를 50%확률로 랜덤값을 적용하게 된다
if(random.uniform(1,2) >= 1.5):
x[1] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-1000.0,1000.0)
return x
##### train 파일
#train 파일을 열어서 값단위로 읽어 배열생성
fd1 = open('salmon_train.txt','r')
salmon_t = fd1.readlines()
fd1.close()
salmon = []
for line in salmon_t:
a = line.split()
salmon.append([float(a[0]),float(a[1])])
fd2 = open('seabass_train.txt','r')
seabass_t = fd2.readlines()
fd2.close()
seabass = []
for line in seabass_t:
a = line.split()
seabass.append([float(a[0]),float(a[1])])
##### text 파일
fd1 = open('salmon_test.txt','r')
salmon_t = fd1.readlines()
fd1.close()
fd2 = open('seabass_test.txt','r')
seabass_t = fd2.readlines()
fd2.close()
salmon = []
seabass = []
#읽어들인 txt파일을 줄단위로 읽어들여서 list에 저장한다
for line in salmon_t:
a=line.split()
salmon.append([float(a[0]),float(a[1])])
for line in seabass_t:
a=line.split()
seabass.append([float(a[0]),float(a[1])])
def runExp(popSize, elitNum, mutProb):
print 'training...' #학습 서브루틴
trResFn = 'train_log_%d_%d_%.2f' % (popSize,eliteNum,mutProb)
#####################-학습 시작-#############################
#log 를 입력하기 위한 파일 오픈
f_name = '%s%d%s%d%s%.2f%s' %('train_log_',popSize,'_',elitNum,'_',round(mutProb,2),'.txt')
fd_log = open(f_name,'w')
genetic_array = []
##랜덤 염색체 부여
for i in range(0,popSize):
genetic_array.append([0,random.uniform(-10.0,10.0),random.uniform(-10.0,10.0),random.uniform(-1000,1000)])
#1세대의 적합도 계산
genetic_array = Erroreval(genetic_array)
#랜덤 염색체들을 정렬한다 -> 엘리트들을 고르기위해
genetic_array.sort()
#가장 좋은 염색체의 오류율을 저장해둔다
low_e = genetic_array[0][0]
count = 1
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
#학습은 오류율이 9%이하로 떨어지면 중단한다
while(low_e>8):
count += 1
#선택을 통해서 가상의 룰렛을 만들어 뽑은 염색체를 교차시킨다
child_array = []
child_array = Selection(genetic_array,popSize)
child_array = Crossover(child_array,popSize,mutProb)
#좋은 부모개체 elitNum개는 다음세대에 그대로 물려준다
elitism_array = []
for i in range(0,elitNum):
elitism_array.append(genetic_array[i])
#다음 세대 염색체를 리스트로 합친다
genetic_array = []
genetic_array.extend(child_array)
genetic_array.extend(elitism_array)
genetic_array.sort()
low_e = genetic_array[0][0]
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
fd_log.close()
print 'result file:' ,trResFn
print 'testing...' #테스트 서브루틴
#####################-테스트 시작-#####################################
# train을 통해 학습한 prameter값을 세팅
# 훈련된 염색체 중 가장 상위 염색체의 파라미터 값으로 설정
tmp_x=genetic_array[0][1]
tmp_y=genetic_array[0][2]
tmp_z=genetic_array[0][3]
parameter = [tmp_x,tmp_y,tmp_z]
x_coefficient = -(parameter[0]/parameter[1])
x_constant =(parameter[ | = array[ | identifier_name |
Genetic_algorithm.py | for i in range(0,25):
for j in range(10,20):
random_selection.append(array[j])
for i in range(0,20):
for j in range(20,30):
random_selection.append(array[j])
for i in range(0,7):
for j in range(30,40):
r |
for i in range(0,6):
for j in range(40,50):
random_selection.append(array[j])
for i in range(0,5):
for j in range(50,60):
random_selection.append(array[j])
for i in range(0,4):
for j in range(60,70):
random_selection.append(array[j])
for i in range(70,80):
random_selection.append(array[i])
for i in range(80,90):
random_selection.append(array[i])
for i in range(90,100):
random_selection.append(array[i])
selection_array = []
#교차 (전체 개체수 - 엘리트 개체수 - 뮤테이션개수) 개를 만들기위해선 그 두배를 선택해야한다
#가상 룰렛을 만든곳에서 랜덤하게 뽑아서 배열을 생성한다
for i in range(0,num*2):
selection_array.append(random_selection[random.randrange(0,1000)])
return selection_array
#교차는 총 (전체 개체수 - 엘리트 개체수) 의 자식을 생성한다
def Crossover(array,num,mutProb):
#1,2,3 이 파라미터니까 1~2사이 혹은 2~3사이를 한점으로 정해서 single potin crossover
crossover_array = []
tmp_x = 0
tmp_y = 0
tmp_z = 0
n = num*2 -1
for i in range(0,n):
if(random.uniform(1,2) >= 1.5):
# 1 과 2중에 랜덤으로 하나를 선택해서 1일경우
# 그점을 중심으로 교차시킨다
tmp_x = array[i][1]
tmp_y = array[i+1][2]
tmp_z = array[i+1][3]
else:
tmp_x = array[i][1]
tmp_y = array[i][2]
tmp_z = array[i+1][3]
tmp = [0,tmp_x,tmp_y,tmp_z]
# 랜덤값이 확률값보다 작을때 뮤테이션
if(random.uniform(0,100) < mutProb*100):
tmp = Mutation(tmp)
tmp = Erroreval_simple(tmp)
crossover_array.append(tmp)
i +=1
return crossover_array
#돌연 변이 생성
def Mutation(x):
#각 파라미터를 50%확률로 랜덤값을 적용하게 된다
if(random.uniform(1,2) >= 1.5):
x[1] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-10.0,10.0)
if(random.uniform(1,2) >= 1.5):
x[2] = random.uniform(-1000.0,1000.0)
return x
##### train 파일
#train 파일을 열어서 값단위로 읽어 배열생성
fd1 = open('salmon_train.txt','r')
salmon_t = fd1.readlines()
fd1.close()
salmon = []
for line in salmon_t:
a = line.split()
salmon.append([float(a[0]),float(a[1])])
fd2 = open('seabass_train.txt','r')
seabass_t = fd2.readlines()
fd2.close()
seabass = []
for line in seabass_t:
a = line.split()
seabass.append([float(a[0]),float(a[1])])
##### text 파일
fd1 = open('salmon_test.txt','r')
salmon_t = fd1.readlines()
fd1.close()
fd2 = open('seabass_test.txt','r')
seabass_t = fd2.readlines()
fd2.close()
salmon = []
seabass = []
#읽어들인 txt파일을 줄단위로 읽어들여서 list에 저장한다
for line in salmon_t:
a=line.split()
salmon.append([float(a[0]),float(a[1])])
for line in seabass_t:
a=line.split()
seabass.append([float(a[0]),float(a[1])])
def runExp(popSize, elitNum, mutProb):
print 'training...' #학습 서브루틴
trResFn = 'train_log_%d_%d_%.2f' % (popSize,eliteNum,mutProb)
#####################-학습 시작-#############################
#log 를 입력하기 위한 파일 오픈
f_name = '%s%d%s%d%s%.2f%s' %('train_log_',popSize,'_',elitNum,'_',round(mutProb,2),'.txt')
fd_log = open(f_name,'w')
genetic_array = []
##랜덤 염색체 부여
for i in range(0,popSize):
genetic_array.append([0,random.uniform(-10.0,10.0),random.uniform(-10.0,10.0),random.uniform(-1000,1000)])
#1세대의 적합도 계산
genetic_array = Erroreval(genetic_array)
#랜덤 염색체들을 정렬한다 -> 엘리트들을 고르기위해
genetic_array.sort()
#가장 좋은 염색체의 오류율을 저장해둔다
low_e = genetic_array[0][0]
count = 1
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
#학습은 오류율이 9%이하로 떨어지면 중단한다
while(low_e>8):
count += 1
#선택을 통해서 가상의 룰렛을 만들어 뽑은 염색체를 교차시킨다
child_array = []
child_array = Selection(genetic_array,popSize)
child_array = Crossover(child_array,popSize,mutProb)
#좋은 부모개체 elitNum개는 다음세대에 그대로 물려준다
elitism_array = []
for i in range(0,elitNum):
elitism_array.append(genetic_array[i])
#다음 세대 염색체를 리스트로 합친다
genetic_array = []
genetic_array.extend(child_array)
genetic_array.extend(elitism_array)
genetic_array.sort()
low_e = genetic_array[0][0]
print('%d%s%f%s%f%s%f' % (count,'th elit parameter =>', genetic_array[0][1],' ',genetic_array[0][2],' ',genetic_array[0][3]))
print('%s%d' % ('the lowest error = ',low_e))
fd_log.write('%s%d%s%d%s' % ('count = ',count,'the lowest error = ',low_e,'\n'))
fd_log.write('%d%s%f%f%f%s' % (count,'세대 엘리트 개체 파라미터 =>', genetic_array[0][1],genetic_array[0][2],genetic_array[0][3],'\n'))
fd_log.close()
print 'result file:' ,trResFn
print 'testing...' #테스트 서브루틴
#####################-테스트 시작-#####################################
# train을 통해 학습한 prameter값을 세팅
# 훈련된 염색체 중 가장 상위 염색체의 파라미터 값으로 설정
tmp_x=genetic_array[0][1]
tmp_y=genetic_array[0][2]
tmp_z=genetic_array[0][3]
parameter = [tmp_x,tmp_y,tmp_z]
x_coefficient = -(parameter[0]/parameter[1])
x_constant =(parameter[ | andom_selection.append(array[j])
| conditional_block |
main.go | }
logs.Debug("etcd clientv3.New success")
defer cli.Close()
//get the value stored under the key
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
resp, err1 := cli.Get(ctx, key)
logs.Debug("etcd get key=%s sucess\n", key)
cancel()
if err1 != nil {
logs.Error("cli.Get err", err1)
err = err1
}
for _, ev := range resp.Kvs {
logs.Debug("etcd get key=%s ,value=%s\n", ev.Key, ev.Value)
result[string(ev.Key)] = string(ev.Value)
}
return
}
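// The value stored under the collect key is expected to be a JSON array with
// one entry per log file to tail. A minimal sketch of such a value (the JSON
// field names here are an assumption, since the CollectionInfo struct tags are
// defined elsewhere; Path and Topic are the fields actually used below):
//   [{"path":"/var/log/app.log","topic":"app_log"}]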
//after fetching the kafka address and log paths from etcd, watch the key for changes
func watchetcdkey(endpoint []string, key string) {
//defer wg.Done()
fmt.Println("watchetcdkey keys", key)
result := make(map[string]string, len(key))
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
if err != nil {
fmt.Println("clientv3.New err", err)
}
defer cli.Close()
//whether the watched etcd value changed and needs to be applied
b := false
//watch this key for changes
fmt.Println("watching keys", key)
rch := cli.Watch(context.Background(), key, clientv3.WithPrefix())
var (
k, v string
)
for wresp := range rch {
for _, ev := range wresp.Events {
k = string(ev.Kv.Key)
v = string(ev.Kv.Value)
if k == AppConfig.etcdkeycollect {
switch ev.Type {
case mvccpb.DELETE:
logs.Error(fmt.Sprintf("key is DELETE,key=:%s", k))
result[string(ev.Kv.Key)] = "DELETE"
b = true
case mvccpb.PUT:
logs.Debug(fmt.Sprintf("key is update,key=:%s", k))
if err != nil {
logs.Error(fmt.Sprintf("cli.Watch getkey,key:%s, err:%s", k, err))
} else {
b = true
result[k] = v
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
}
default:
logs.Debug(fmt.Sprintf("%s %q :%q \n", ev.Type, ev.Kv.Key, ev.Kv.Value))
}
}
}
if b {
updateKeys(&result)
}
}
// if b {
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
// }
}
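// updateKeys applies a changed configuration: it signals every running
// collector to stop via its update channel, replaces CollectList with the
// newly decoded entries and starts a fresh readLog/sendMsg pair per entry.
// A value of "DELETE" marks a key that was removed from etcd.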
func updateKeys(result *map[string]string) {
logs.Debug("updateKeys:%v ", result)
endpoints := []string{AppConfig.kafkaAddr}
for _, v := range *result {
if v != "DELETE" {
var collectTemplist []CollectionInfo
err := json.Unmarshal([]byte(v), &collectTemplist)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err", err)
return
}
logs.Debug("update keys after json.Unmarshal collectTemplist:", collectTemplist)
//stop the collectors that are currently running
for i := 0; i < len(CollectList); i++ {
logs.Debug("stop current goroutine path :%s", CollectList[i].Path)
CollectList[i].update <- true
}
//replace CollectList with the new configuration
CollectList = collectTemplist
logs.Debug("new CollectList =======:", CollectList)
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(1)
lines := make(chan *tail.Line)
logs.Debug("start update new address ", CollectList[i].Path)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
logs.Debug("update keys read send log CollectList:", CollectList)
} else {
//the key was deleted: stopping reads for the removed paths is not implemented here yet
}
}
}
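// readLog tails the file at collectionInfo.Path (re-opening it on rotation)
// and forwards every non-empty line to msgchan until a signal arrives on
// collectionInfo.update, at which point the goroutine returns.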
//read the log file at the configured path
func readLog(msgchan chan *tail.Line, collectionInfo *CollectionInfo) {
logs.Debug("tail.TailFile init CollectionInfo:%v addr is %p", collectionInfo, collectionInfo)
tails, err := tail.TailFile(collectionInfo.Path, tail.Config{
ReOpen: true,
Follow: true,
//Location: &tail.SeekInfo{Offset: 0, Whence: 2},
MustExist: false,
Poll: true,
})
if err != nil {
logs.Error("tail.TailFile err:", err)
return
}
logs.Debug("tail.TailFile init success")
var (
msg *tail.Line
ok bool
)
defer close(msgchan)
for {
select {
case msg = <-tails.Lines:
logs.Info("============i am ready for read log of %s=========", collectionInfo.Path)
if len(msg.Text) != 0 {
msgchan <- msg
logs.Debug("read log,msg len is: %d ----- info is:%s\n", len(msg.Text), msg.Text)
}
case ok = <-collectionInfo.update:
if ok {
//close(msgchan)
logs.Debug("check path:%s is update so return current ", collectionInfo.Path)
return
}
default:
//logs.Info("============read log chan is block path is %s=========", collectionInfo.Path)
}
}
}
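// sendMsg drains the line channel and publishes each line synchronously to
// the Kafka topic configured for this collector, waiting for acknowledgement
// from all in-sync replicas before continuing.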
//send messages to kafka
func sendMsg(lines chan *tail.Line, collectionInfo *CollectionInfo, endpoint []string) {
defer wg.Done()
config := sarama.NewConfig()
//wait for acknowledgement from all in-sync replicas
config.Producer.RequiredAcks = sarama.WaitForAll
//assign message partitions randomly
config.Producer.Partitioner = sarama.NewRandomPartitioner
config.Producer.Return.Successes = true
//create a synchronous producer for the given broker addresses
client, err := sarama.NewSyncProducer(endpoint, config)
if err != nil {
logs.Error("sarama.NewSyncProducer err:", err)
return
}
defer client.Close()
logs.Info("start sendmsg to kafka:")
var (
pid int32
offset int64
msgsend *tail.Line
ok bool
)
for {
//ok == false means the channel was closed, so stop
logs.Info("start forever revice path:%s and sendmsg to kafka:", collectionInfo.Path)
if msgsend, ok = <-lines; ok {
logs.Info("sendmsg to kafak msg is %s\n", msgsend.Text)
msg := &sarama.ProducerMessage{
Topic: collectionInfo.Topic,
Value: sarama.StringEncoder(msgsend.Text),
}
pid, offset, err = client.SendMessage(msg)
if err != nil {
logs.Error("client.SendMesage err:", err)
return
}
logs.Info("sendmsg to kafak success ,pid:%v, offset:%v", pid, offset)
} else {
logs.Error("check path:%s read chan is closed", collectionInfo.Path)
return
}
}
}
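// Note: despite its name, sendMsgAsync below also uses sarama's synchronous
// producer; switching it to sarama.NewAsyncProducer would be a separate change.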
//send messages to kafka for a single topic
func sendMsgAsync(lines chan *tail.Line, topic string, endpoint []string) {
config := sarama.NewConfig()
//wait for acknowledgement from all in-sync replicas
config.Producer.RequiredAcks = sarama.WaitForAll
//assign message partitions randomly
config.Producer.Partitioner = sarama.NewRandomPartitioner
config.Producer.Return.Successes = true
//create a synchronous producer for the given broker addresses
client, err := sarama.NewSyncProducer(endpoint, config)
if err != nil {
logs.Error("sarama.NewSyncProducer err:", err)
return
}
defer client.Close()
logs.Info("start sendmsg to kafka:")
var (
pid int32
offset int64
msgsend *tail.Line
ok bool
)
for {
//ok == false means the channel was closed, so stop
if msgsend, ok = <-lines; ok {
logs.Info("sendmsg to kafak msg is %s:", msgsend.Text)
msg := &sarama.ProducerMessage{
Topic: topic,
Value: sarama.StringEncoder(msgsend.Text),
}
pid, offset, err = client.SendMessage(msg)
if err != nil {
logs.Error("client.SendMesage err:", err)
return
}
logs.Info("sendmsg to kafak success ,pid:%v, offset:%v", pid, offset)
} else {
logs.Error("client.SendMesage chan close:", ok)
return
}
}
}
| conditional_block |
||
main.go | keycollect]), &CollectList)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err:%v", err)
}
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
}
logs.Debug("获取的配置信息:%v", AppConfig)
//start tailing logs according to the configuration read from etcd
endpoints := []string{AppConfig.kafkaAddr}
lines := make(chan *tail.Line)
for i := 0; i < len(CollectList); i++ {
logs.Debug("update keys before path%s addr is:%x", CollectList[i].Path, &CollectList[i])
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(2)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
wg.Add(1)
watchetcdkey(endpointsetcd, AppConfig.etcdkeycollect)
}
wg.Wait()
}
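// The configuration file is expected to provide [kafka] addr, [etcd] addr and
// [etcd] keycollect entries; loadConfig below falls back to local defaults
// (127.0.0.1:9092 for Kafka and 127.0.0.1:2379 for etcd) when an address is missing.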
//loadConfig loads the configuration file
func loadConfig(configType, path string) (myConfig MyConfig, err error) {
defer func(myConfig *MyConfig) {
logs.Debug("read kafka addr=: ", myConfig.kafkaAddr)
logs.Debug("read etcdaddr=: ", myConfig.etcdAddr)
logs.Debug("read etcdkeycollect=: ", myConfig.etcdkeycollect)
}(&myConfig)
conf, err := config.NewConfig(configType, path)
if err != nil {
logs.Error("new config failed, err:", err)
}
logs.Debug("读取配置得路径是:", path)
myConfig.kafkaAddr = conf.String("kafka::addr")
if len(myConfig.kafkaAddr) == 0 {
myConfig.kafkaAddr = "127.0.0.1:9092"
err = errors.New("Not find server ip ,use default addr:127.0.0.1:9092")
}
myConfig.etcdAddr = conf.String("etcd::addr")
if len(myConfig.etcdAddr) == 0 {
err = errors.New("Not find etcd path,use defauly ip port:127.0.0.1:2379 ")
myConfig.etcdAddr = "127.0.0.1:2379"
}
myConfig.etcdkeycollect = conf.String("etcd::keycollect")
if len(myConfig.etcdkeycollect) == 0 {
err = errors.New("Not find etcd keycollect")
return
}
return
}
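// initAppLog keeps the multi-file beego/logs setup commented out and simply
// installs the console adapter, which is more convenient while debugging.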
///initAppLog initialises the application logging
func initAppLog(path string) (err error) {
// config := make(map[string]interface{})
// logpath := path + `\logagent\Logs`
// //create the log directory if it does not exist
// err = os.MkdirAll(logpath, os.ModeDir)
// if err != nil {
// config["filename"] = `longagent.log`
// } else {
// config["filename"] = path + `\logagent\Logs\longagent.log`
// }
// //write different log levels to separate files
// config["separate"] = []string{"error", "info", "debug"}
// //log the caller's file name and line number (default is false)
// logs.EnableFuncCallDepth(true)
// //asynchronous output with a small buffered channel
// logs.Async(3)
// //multi-file: write debug, error and other levels separately
// configJSON, err1 := json.Marshal(config)
// if err1 != nil {
// err = err1
// err = logs.SetLogger(logs.AdapterMultiFile, `{"filename":"longagent.log"}`)
// } else {
// err = logs.SetLogger(logs.AdapterMultiFile, string(configJSON))
// }
//for easier debugging, log to the console for now
logs.SetLogger(logs.AdapterConsole)
return
}
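// initetcd dials etcd, reads the current value stored under key and returns
// it as a map from key name to the raw JSON string stored there.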
//initialise etcd
func initetcd(endpoint []string, key string) (result map[string]string, err error) {
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
result = make(map[string]string, len(key))
if err != nil {
logs.Error("etcd clientv3.New err", err)
return
}
logs.Debug("etcd clientv3.New success")
defer cli.Close()
//获取key所对应的值
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
resp, err1 := cli.Get(ctx, key)
logs.Debug("etcd get key=%s sucess\n", key)
cancel()
if err1 != nil {
logs.Error("cli.Get err", err1)
err = err1
}
for _, ev := range resp.Kvs {
logs.Debug("etcd get key=%s ,value=%s\n", ev.Key, ev.Value)
result[string(ev.Key)] = string(ev.Value)
}
return
}
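// To change which files are tailed at runtime, write a new JSON array to the
// watched key; with the etcd v3 CLI that is, for example (the key name and the
// JSON field names are assumptions, matching the sketch above):
//   etcdctl put /logagent/collect '[{"path":"/var/log/app.log","topic":"app_log"}]'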
//after fetching the kafka address and log paths from etcd, watch the key for changes
func watchetcdkey(endpoint []string, key string) {
//defer wg.Done()
fmt.Println("watchetcdkey keys", key) | Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
if err != nil {
fmt.Println("clientv3.New err", err)
}
defer cli.Close()
//whether the collected config needs to be updated
b := false
//watch the key for changes
fmt.Println("watching keys", key)
rch := cli.Watch(context.Background(), key, clientv3.WithPrefix())
var (
k, v string
)
for wresp := range rch {
for _, ev := range wresp.Events {
k = string(ev.Kv.Key)
v = string(ev.Kv.Value)
if k == AppConfig.etcdkeycollect {
switch ev.Type {
case mvccpb.DELETE:
logs.Error(fmt.Sprintf("key is DELETE,key=:%s", k))
result[string(ev.Kv.Key)] = "DELETE"
b = true
case mvccpb.PUT:
logs.Debug(fmt.Sprintf("key is update,key=:%s", k))
if err != nil {
logs.Error(fmt.Sprintf("cli.Watch getkey,key:%s, err:%s", k, err))
} else {
b = true
result[k] = v
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
}
default:
logs.Debug(fmt.Sprintf("%s %q :%q \n", ev.Type, ev.Kv.Key, ev.Kv.Value))
}
}
}
if b {
updateKeys(&result)
}
}
// if b {
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
// }
}
func updateKeys(result *map[string]string) {
logs.Debug("updateKeys:%v ", result)
endpoints := []string{AppConfig.kafkaAddr}
for _, v := range *result {
if v != "DELETE" {
var collectTemplist []CollectionInfo
err := json.Unmarshal([]byte(v), &collectTemplist)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err", err)
return
}
logs.Debug("update keys after json.Unmarshal collectTemplist:", collectTemplist)
//stop the existing collectors
for i := 0; i < len(CollectList); i++ {
logs.Debug("stop current goroutine path :%s", CollectList[i].Path)
CollectList[i].update <- true
}
//reset CollectList before replacing it
CollectList = append(CollectList, CollectList[:0]...)
CollectList = collectTemplist
logs.Debug("new CollectList =======:", CollectList)
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(1)
lines := make(chan *tail.Line)
logs.Debug("start update new address ", CollectList[i].Path)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
logs.Debug("update keys read send log CollectList:", CollectList)
} else {
//stop reading paths that were removed
}
}
}
//readLog tails the log file at the given path
func readLog(msgchan chan *tail.Line, collectionInfo *CollectionInfo) {
logs.Debug("tail.TailFile init CollectionInfo:%v addr is %p", collectionInfo, collection | result := make(map[string]string, len(key))
cli, err := clientv3.New(clientv3.Config{ | random_line_split |
main.go | // //create the directory if it does not exist
// err = os.MkdirAll(logpath, os.ModeDir)
// if err != nil {
// config["filename"] = `longagent.log`
// } else {
// config["filename"] = path + `\logagent\Logs\longagent.log`
// }
// //write different log levels to separate files
// config["separate"] = []string{"error", "info", "debug"}
// //log the calling file name and line number (default is false)
// logs.EnableFuncCallDepth(true)
// //async output with a buffered chan
// logs.Async(3)
// //multi-file: write debug, error, etc. to separate files
// configJSON, err1 := json.Marshal(config)
// if err1 != nil {
// err = err1
// err = logs.SetLogger(logs.AdapterMultiFile, `{"filename":"longagent.log"}`)
// } else {
// err = logs.SetLogger(logs.AdapterMultiFile, string(configJSON))
// }
//for easier debugging, log to the console for now
logs.SetLogger(logs.AdapterConsole)
return
}
//initetcd initializes the etcd client and reads the key
func initetcd(endpoint []string, key string) (result map[string]string, err error) {
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
result = make(map[string]string, len(key))
if err != nil {
logs.Error("etcd clientv3.New err", err)
return
}
logs.Debug("etcd clientv3.New success")
defer cli.Close()
//fetch the value stored under the key
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
resp, err1 := cli.Get(ctx, key)
logs.Debug("etcd get key=%s sucess\n", key)
cancel()
if err1 != nil {
logs.Error("cli.Get err", err1)
err = err1
}
for _, ev := range resp.Kvs {
logs.Debug("etcd get key=%s ,value=%s\n", ev.Key, ev.Value)
result[string(ev.Key)] = string(ev.Value)
}
return
}
//watch the kafka/log-path config in etcd and react to changes
func watchetcdkey(endpoint []string, key string) {
//defer wg.Done()
fmt.Println("watchetcdkey keys", key)
result := make(map[string]string, len(key))
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
if err != nil {
fmt.Println("clientv3.New err", err)
}
defer cli.Close()
//whether the collected config needs to be updated
b := false
//watch the key for changes
fmt.Println("watching keys", key)
rch := cli.Watch(context.Background(), key, clientv3.WithPrefix())
var (
k, v string
)
for wresp := range rch {
for _, ev := range wresp.Events {
k = string(ev.Kv.Key)
v = string(ev.Kv.Value)
if k == AppConfig.etcdkeycollect {
switch ev.Type {
case mvccpb.DELETE:
logs.Error(fmt.Sprintf("key is DELETE,key=:%s", k))
result[string(ev.Kv.Key)] = "DELETE"
b = true
case mvccpb.PUT:
logs.Debug(fmt.Sprintf("key is update,key=:%s", k))
if err != nil {
logs.Error(fmt.Sprintf("cli.Watch getkey,key:%s, err:%s", k, err))
} else {
b = true
result[k] = v
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
}
default:
logs.Debug(fmt.Sprintf("%s %q :%q \n", ev.Type, ev.Kv.Key, ev.Kv.Value))
}
}
}
if b {
updateKeys(&result)
}
}
// if b {
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
// }
}
func updateKeys(result *map[string]string) {
logs.Debug("updateKeys:%v ", result)
endpoints := []string{AppConfig.kafkaAddr}
for _, v := range *result {
if v != "DELETE" {
var collectTemplist []CollectionInfo
err := json.Unmarshal([]byte(v), &collectTemplist)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err", err)
return
}
logs.Debug("update keys after json.Unmarshal collectTemplist:", collectTemplist)
//stop the existing collectors
for i := 0; i < len(CollectList); i++ {
logs.Debug("stop current goroutine path :%s", CollectList[i].Path)
CollectList[i].update <- true
}
//reset CollectList before replacing it
CollectList = append(CollectList, CollectList[:0]...)
CollectList = collectTemplist
logs.Debug("new CollectList =======:", CollectList)
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(1)
lines := make(chan *tail.Line)
logs.Debug("start update new address ", CollectList[i].Path)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
logs.Debug("update keys read send log CollectList:", CollectList)
} else {
//stop reading paths that were removed
}
}
}
//readLog tails the log file at the given path
func readLog(msgchan chan *tail.Line, collectionInfo *CollectionInfo) {
logs.Debug("tail.TailFile init CollectionInfo:%v addr is %p", collectionInfo, collectionInfo)
tails, err := tail.TailFile(collectionInfo.Path, tail.Config{
ReOpen: true,
Follow: true,
//Location: &tail.SeekInfo{Offset: 0, Whence: 2},
MustExist: false,
Poll: true,
})
if err != nil {
logs.Error("tail.TailFile err:", err)
return
}
logs.Debug("tail.TailFile init success")
var (
msg *tail.Line
ok bool
)
defer close(msgchan)
for {
select {
case msg = <-tails.Lines:
logs.Info("============i am ready for read log of %s=========", collectionInfo.Path)
if len(msg.Text) != 0 {
msgchan <- msg
logs.Debug("read log,msg len is: %d ----- info is:%s\n", len(msg.Text), msg.Text)
}
case ok = <-collectionInfo.update:
if ok {
//close(msgchan)
logs.Debug("check path:%s is update so return current ", collectionInfo.Path)
return
}
default:
//logs.Info("============read log chan is block path is %s=========", collectionInfo.Path)
}
}
}
//给kafka发送消息
func sendMsg(lines chan *tail.Line, collectionInfo *CollectionInfo, endpoint []string) {
defer wg.Done()
config := sarama.NewConfig()
//wait for acks from all replicas
config.Producer.RequiredAcks = sarama.WaitForAll
//use a random partitioner for messages
config.Producer.Partitioner = sarama.NewRandomPartitioner
config.Producer.Return.Successes = true
//create a synchronous producer for the given broker addresses
client, err := sarama.NewSyncProducer(endpoint, config)
defer client.Close()
if err != nil {
logs.Error("sarama.NewAsyncProducer err:", err)
return
}
logs.Info("start sendmsg to kafka:")
var (
pid int32
offset int64
msgsend *tail.Line
ok bool
)
for {
//ok == false means the chan was closed, so we should stop
logs.Info("start fore | ver receive path:%s and sendmsg to kafka:", collectionInfo.Path)
if msgsend, ok = <-lines; ok {
logs.Info("sendmsg to kafka msg is %s\n", msgsend.Text)
msg := &sarama.ProducerMessage{
Topic: collectionInfo.Topic,
Value: sarama.StringEncoder(msgsend.Text),
}
pid, offset, err = client.SendMessage(msg)
if err != nil {
logs.Error("client.SendMesage err:", err)
return
}
logs.Info("sendmsg to kafak success ,pid:%v, offset:%v", pid, offset)
} else {
logs.Error("check path:%s read chan is closed", collectionInfo.Path)
return
}
} | identifier_body |
|
main.go | keycollect]), &CollectList)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err:%v", err)
}
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
}
logs.Debug("获取的配置信息:%v", AppConfig)
//根据etcd读取配置文件 开始跟踪日志
endpoints := []string{AppConfig.kafkaAddr}
lines := make(chan *tail.Line)
for i := 0; i < len(CollectList); i++ {
logs.Debug("update keys before path%s addr is:%x", CollectList[i].Path, &CollectList[i])
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(2)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
wg.Add(1)
watchetcdkey(endpointsetcd, AppConfig.etcdkeycollect)
}
wg.Wait()
}
//loadConfig loads the configuration file
func loadConfig(configType, path string) (myConfig MyConfig, err error) {
defer func(myConfig *MyConfig) {
logs.Debug("read kafka addr=: ", myConfig.kafkaAddr)
logs.Debug("read etcdaddr=: ", myConfig.etcdAddr)
logs.Debug("read etcdkeycollect=: ", myC | keycollect)
}(&myConfig)
conf, err := config.NewConfig(configType, path)
if err != nil {
logs.Error("new config failed, err:", err)
}
logs.Debug("读取配置得路径是:", path)
myConfig.kafkaAddr = conf.String("kafka::addr")
if len(myConfig.kafkaAddr) == 0 {
myConfig.kafkaAddr = "127.0.0.1:9092"
err = errors.New("Not find server ip ,use default addr:127.0.0.1:9092")
}
myConfig.etcdAddr = conf.String("etcd::addr")
if len(myConfig.etcdAddr) == 0 {
err = errors.New("Not find etcd path,use defauly ip port:127.0.0.1:2379 ")
myConfig.etcdAddr = "127.0.0.1:2379"
}
myConfig.etcdkeycollect = conf.String("etcd::keycollect")
if len(myConfig.etcdkeycollect) == 0 {
err = errors.New("Not find etcd keycollect")
return
}
return
}
//initAppLog initializes the application logging
func initAppLog(path string) (err error) {
// config := make(map[string]interface{})
// logpath := path + `\logagent\Logs`
// //create the directory if it does not exist
// err = os.MkdirAll(logpath, os.ModeDir)
// if err != nil {
// config["filename"] = `longagent.log`
// } else {
// config["filename"] = path + `\logagent\Logs\longagent.log`
// }
// //write different log levels to separate files
// config["separate"] = []string{"error", "info", "debug"}
// //log the calling file name and line number (default is false)
// logs.EnableFuncCallDepth(true)
// //async output with a buffered chan
// logs.Async(3)
// //multi-file: write debug, error, etc. to separate files
// configJSON, err1 := json.Marshal(config)
// if err1 != nil {
// err = err1
// err = logs.SetLogger(logs.AdapterMultiFile, `{"filename":"longagent.log"}`)
// } else {
// err = logs.SetLogger(logs.AdapterMultiFile, string(configJSON))
// }
//for easier debugging, log to the console for now
logs.SetLogger(logs.AdapterConsole)
return
}
//initetcd initializes the etcd client and reads the key
func initetcd(endpoint []string, key string) (result map[string]string, err error) {
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
result = make(map[string]string, len(key))
if err != nil {
logs.Error("etcd clientv3.New err", err)
return
}
logs.Debug("etcd clientv3.New success")
defer cli.Close()
//fetch the value stored under the key
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
resp, err1 := cli.Get(ctx, key)
logs.Debug("etcd get key=%s sucess\n", key)
cancel()
if err1 != nil {
logs.Error("cli.Get err", err1)
err = err1
}
for _, ev := range resp.Kvs {
logs.Debug("etcd get key=%s ,value=%s\n", ev.Key, ev.Value)
result[string(ev.Key)] = string(ev.Value)
}
return
}
//watch the kafka/log-path config in etcd and react to changes
func watchetcdkey(endpoint []string, key string) {
//defer wg.Done()
fmt.Println("watchetcdkey keys", key)
result := make(map[string]string, len(key))
cli, err := clientv3.New(clientv3.Config{
Endpoints: endpoint,
DialTimeout: 5 * time.Second,
})
if err != nil {
fmt.Println("clientv3.New err", err)
}
defer cli.Close()
//whether the collected config needs to be updated
b := false
//watch the key for changes
fmt.Println("watching keys", key)
rch := cli.Watch(context.Background(), key, clientv3.WithPrefix())
var (
k, v string
)
for wresp := range rch {
for _, ev := range wresp.Events {
k = string(ev.Kv.Key)
v = string(ev.Kv.Value)
if k == AppConfig.etcdkeycollect {
switch ev.Type {
case mvccpb.DELETE:
logs.Error(fmt.Sprintf("key is DELETE,key=:%s", k))
result[string(ev.Kv.Key)] = "DELETE"
b = true
case mvccpb.PUT:
logs.Debug(fmt.Sprintf("key is update,key=:%s", k))
if err != nil {
logs.Error(fmt.Sprintf("cli.Watch getkey,key:%s, err:%s", k, err))
} else {
b = true
result[k] = v
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
}
default:
logs.Debug(fmt.Sprintf("%s %q :%q \n", ev.Type, ev.Kv.Key, ev.Kv.Value))
}
}
}
if b {
updateKeys(&result)
}
}
// if b {
// fmt.Println("updateKeys ", result)
// updateKeys(&result)
// logs.Debug(fmt.Sprintf("updateKeys:%v ", result))
// }
}
func updateKeys(result *map[string]string) {
logs.Debug("updateKeys:%v ", result)
endpoints := []string{AppConfig.kafkaAddr}
for _, v := range *result {
if v != "DELETE" {
var collectTemplist []CollectionInfo
err := json.Unmarshal([]byte(v), &collectTemplist)
if err != nil {
logs.Error("json Unmarshal etcdkeycollect err", err)
return
}
logs.Debug("update keys after json.Unmarshal collectTemplist:", collectTemplist)
//stop the existing collectors
for i := 0; i < len(CollectList); i++ {
logs.Debug("stop current goroutine path :%s", CollectList[i].Path)
CollectList[i].update <- true
}
//reset CollectList before replacing it
CollectList = append(CollectList, CollectList[:0]...)
CollectList = collectTemplist
logs.Debug("new CollectList =======:", CollectList)
for i := 0; i < len(CollectList); i++ {
if CollectList[i].update == nil {
CollectList[i].update = make(chan bool)
}
wg.Add(1)
lines := make(chan *tail.Line)
logs.Debug("start update new address ", CollectList[i].Path)
go readLog(lines, &CollectList[i])
// read the lines and push them to kafka
go sendMsg(lines, &CollectList[i], endpoints)
}
logs.Debug("update keys read send log CollectList:", CollectList)
} else {
//stop reading paths that were removed
}
}
}
//readLog tails the log file at the given path
func readLog(msgchan chan *tail.Line, collectionInfo *CollectionInfo) {
logs.Debug("tail.TailFile init CollectionInfo:%v addr is %p", collectionInfo | onfig.etcd | identifier_name |
articlesearch.py | == 200:
raw_json = json.loads(await response.read())
with open("Settings.json", "w+") as file:
json.dump(raw_json, file, indent=2)
with open("Settings.json") as file:
return json.load(file)
async def connect(host: str = "localhost", database: str = "postgres", user: str = "postgres",
port: int = None, password: str = None, passfile=None, ssl: bool = False, use_file: bool = True):
"""Connects to a database"""
if use_file:
# Load Settings
settings = await fetch_settings()
host = settings["host"]
database = settings["database"]
user = settings["user"]
passfile = settings["passfile"]
password = settings["password"]
ssl = settings["ssl"]
port = settings["port"]
return await asyncpg.connect(host=host, port=port, user=user, password=password, passfile=passfile,
database=database, ssl=ssl)
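# Illustrative usage sketch (not part of the original file): host/database/password values
# below are placeholders; the call shape matches connect() above.
#   conn = await connect(use_file=False, host="localhost", database="postgres",
#                        user="postgres", password="secret")
#   version = await conn.fetchval("SELECT version();")
#   await conn.close()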
async def update():
"""Looks for new articles."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
async with aiohttp.ClientSession() as session:
async with session.get("https://community.elitedangerous.com/") as response:
html = Bs4(await response.text(), "html.parser")
connection = await connect()
uids = []
new_articles = set()
uid_records = await connection.fetch(f"""
SELECT "UID" FROM "{table}" ORDER BY "dateReleased" DESC LIMIT 50;
""")
for record in uid_records:
uids.append(record["UID"])
for entry in html.find_all("h3", {"class": "hiLite galnetNewsArticleTitle"}):
entry = entry.find("a").get("href")[re.search("^/galnet/uid/", entry.find("a").get("href")).end():]
if entry not in uids:
new_articles.add(entry)
added = []
for article in new_articles:
date_today = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
async with session.get(f"https://community.elitedangerous.com/galnet/uid/{article}") as response:
bs4 = Bs4(await response.text(), "html.parser")
entry = bs4.find("h3", {"class": "hiLite galnetNewsArticleTitle"})
# Article Content
entry_title = entry.get_text().strip().replace("'", "''")
if entry_title == "" or entry_title is None: |
# Date info
date_article = bs4.find("p").get_text()
date_article = datetime.datetime.strptime(date_article, "%d %b %Y")
if date_article.year >= 3300:
date_article = date_article.replace(year=(date_article.year - GAME_YEAR_OFFSET))
added.append(article)
await connection.execute(f"""
INSERT INTO "{table}"("Title", "UID", "dateReleased", "dateAdded", "Text") VALUES (
$1, $2, $3, $4, $5);
""", entry_title, article, date_article, date_today, text)
await connection.close()
if len(new_articles) > 0:
return len(added), added
async def search(terms):
"""Searches the DB for given input.
Options:
--title: Searches only in the titles of the articles (default search mode)
--content: Searches only in the content of an article, and ignores the title
--searchall: Searches both title and content of an article
--searchreverse: Searches the DB from the oldest article
--limit: Returns only the latest results up to number given (default 5). Format: limit=XYZ
--limitall: Returns all results found
--before: Looks for articles that were written before a given date. Format: YYYY-MM-DD
--after: Looks for articles that were written after a given date. Format: YYYY-MM-DD
If both the --after & --before tags are given, the search is limited to the dates between both options."""
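# Hedged examples of how these options combine (illustrative only; the search terms
# and dates are made up):
#   results, total = await search("thargoid --searchall --limit=3 --after=3305-01-01")
#   results, total = await search("witch space --content --before=3304-06-01")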
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if ";" in terms:
terms.replace(";", "")
return "You can't use ';' in your searches!"
terms = terms.split(" ")
options = []
words = []
results = []
limit = 5
searchorder = "DESC"
datebegin = "0000-00-00"
dateend = "4000-01-01"
# Separating Options and Search Terms
for item in terms:
if "--" in item[:2]:
option = item.replace("--", "")
if option == "limitall" or option == "listall":
limit = 10000000
elif "limit" in option:
try:
limit = int(option[6:])
except ValueError:
limit = 5
elif "before" in option:
year = datetime.datetime.strptime(option[7:], "%Y-%m-%d").year
# Convert date to format stored table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[11:]
dateend = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
dateend = datetime.datetime.strptime(option[7:], "%Y-%m-%d")
options.append("before")
elif "after" in option:
year = datetime.datetime.strptime(option[6:], "%Y-%m-%d").year
# Convert date to format stored in table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[10:]
datebegin = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
datebegin = datetime.datetime.strptime(option[6:], "%Y-%m-%d")
options.append("after")
elif option == "searchreverse":
searchorder = "ASC"
else:
options.append(option)
else:
words.append(item.lower())
# Searching
connection = await connect()
if "before" in options and "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" BETWEEN $1 AND $2
ORDER BY "dateReleased" {searchorder};
""", datebegin, dateend)
elif "before" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" < $1
ORDER BY "dateReleased" {searchorder};
""", dateend)
elif "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" > $1
ORDER BY "dateReleased" {searchorder};
""", datebegin)
else:
rows = await connection.fetch(f"""
SELECT * FROM "{table}" ORDER BY "dateReleased" {searchorder};
""")
await connection.close()
if "searchall" in options:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
if word in row["Text"].lower():
if row in results:
pass
else:
results.append(row)
elif "content" in options:
for row in rows:
for word in words:
if word in row["Text"].lower():
results.append(row)
else:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
return results[:limit], len(results)
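# Sketch of consuming the returned (rows, hit-count) pair (not from the original file):
#   matches, hits = await search("lakon --title --limit=5")
#   for row in matches:
#       print(row["ID"], row["Title"])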
async def read(articleid=True, uid=False):
"""Returns the article with the matching ID.
If the input is invalid or the article is not found, empty list is returned."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if uid:
connection = await connect()
row = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "UID" = $1;
""", str(uid))
await connection.close()
return row
try:
articleid = int(articleid)
except ValueError:
return []
connection = await connect()
rows = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "ID" = $1;
""", articleid)
await connection.close()
result = []
for row in rows:
row_dict = dict(row)
row_dict["dateReleased"] = row["dateReleased"].replace(year=(row["dateReleased"].year + GAME_YEAR_OFFSET))
result.append(row_dict)
return result
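# Sketch of the year conversion applied above (illustrative; assumes GAME_YEAR_OFFSET is the
# real-year/in-game-year gap defined earlier in this file):
#   stored = datetime.datetime(2019, 5, 1)
#   displayed = stored.replace(year=stored.year + GAME_YEAR_OFFSET)  # in-game 33xx date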
async def count(options):
"""Counts the amount of articles that fit the given conditions.
Options:
--title: Counts the amount of articles that contain a certain term in the title.
--content: Counts the amount of articles that contain a certain term only in their content.
--all: Counts the amount of articles that contain a certain term in either the title or the content.
--before: Counts the amount of articles before a given date. Format: YYYY-MM-DD
--after: Counts the amount of articles after a given date. Format | entry_title = "No Title Available"
text = unquote(bs4.find_all("p")[1].get_text().replace("'", "''")) | random_line_split |
articlesearch.py | == 200:
raw_json = json.loads(await response.read())
with open("Settings.json", "w+") as file:
json.dump(raw_json, file, indent=2)
with open("Settings.json") as file:
return json.load(file)
async def connect(host: str = "localhost", database: str = "postgres", user: str = "postgres",
port: int = None, password: str = None, passfile=None, ssl: bool = False, use_file: bool = True):
"""Connects to a database"""
if use_file:
# Load Settings
settings = await fetch_settings()
host = settings["host"]
database = settings["database"]
user = settings["user"]
passfile = settings["passfile"]
password = settings["password"]
ssl = settings["ssl"]
port = settings["port"]
return await asyncpg.connect(host=host, port=port, user=user, password=password, passfile=passfile,
database=database, ssl=ssl)
async def update():
"""Looks for new articles."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
async with aiohttp.ClientSession() as session:
async with session.get("https://community.elitedangerous.com/") as response:
html = Bs4(await response.text(), "html.parser")
connection = await connect()
uids = []
new_articles = set()
uid_records = await connection.fetch(f"""
SELECT "UID" FROM "{table}" ORDER BY "dateReleased" DESC LIMIT 50;
""")
for record in uid_records:
uids.append(record["UID"])
for entry in html.find_all("h3", {"class": "hiLite galnetNewsArticleTitle"}):
entry = entry.find("a").get("href")[re.search("^/galnet/uid/", entry.find("a").get("href")).end():]
if entry not in uids:
new_articles.add(entry)
added = []
for article in new_articles:
date_today = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
async with session.get(f"https://community.elitedangerous.com/galnet/uid/{article}") as response:
bs4 = Bs4(await response.text(), "html.parser")
entry = bs4.find("h3", {"class": "hiLite galnetNewsArticleTitle"})
# Article Content
entry_title = entry.get_text().strip().replace("'", "''")
if entry_title == "" or entry_title is None:
entry_title = "No Title Available"
text = unquote(bs4.find_all("p")[1].get_text().replace("'", "''"))
# Date info
date_article = bs4.find("p").get_text()
date_article = datetime.datetime.strptime(date_article, "%d %b %Y")
if date_article.year >= 3300:
date_article = date_article.replace(year=(date_article.year - GAME_YEAR_OFFSET))
added.append(article)
await connection.execute(f"""
INSERT INTO "{table}"("Title", "UID", "dateReleased", "dateAdded", "Text") VALUES (
$1, $2, $3, $4, $5);
""", entry_title, article, date_article, date_today, text)
await connection.close()
if len(new_articles) > 0:
return len(added), added
async def search(terms):
"""Searches the DB for given input.
Options:
--title: Searches only in the titles of the articles (default search mode)
--content: Searches only in the content of an article, and ignores the title
--searchall: Searches both title and content of an article
--searchreverse: Searches the DB from the oldest article
--limit: Returns only the latest results up to number given (default 5). Format: limit=XYZ
--limitall: Returns all results found
--before: Looks for articles that were written before a given date. Format: YYYY-MM-DD
--after: Looks for articles that were written after a given date. Format: YYYY-MM-DD
If both the --after & --before tags are given, the search is limited to the dates between both options."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if ";" in terms:
terms.replace(";", "")
return "You can't use ';' in your searches!"
terms = terms.split(" ")
options = []
words = []
results = []
limit = 5
searchorder = "DESC"
datebegin = "0000-00-00"
dateend = "4000-01-01"
# Separating Options and Search Terms
for item in terms:
if "--" in item[:2]:
option = item.replace("--", "")
if option == "limitall" or option == "listall":
limit = 10000000
elif "limit" in option:
try:
limit = int(option[6:])
except ValueError:
limit = 5
elif "before" in option:
|
elif "after" in option:
year = datetime.datetime.strptime(option[6:], "%Y-%m-%d").year
# Convert date to format stored in table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[10:]
datebegin = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
datebegin = datetime.datetime.strptime(option[6:], "%Y-%m-%d")
options.append("after")
elif option == "searchreverse":
searchorder = "ASC"
else:
options.append(option)
else:
words.append(item.lower())
# Searching
connection = await connect()
if "before" in options and "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" BETWEEN $1 AND $2
ORDER BY "dateReleased" {searchorder};
""", datebegin, dateend)
elif "before" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" < $1
ORDER BY "dateReleased" {searchorder};
""", dateend)
elif "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" > $1
ORDER BY "dateReleased" {searchorder};
""", datebegin)
else:
rows = await connection.fetch(f"""
SELECT * FROM "{table}" ORDER BY "dateReleased" {searchorder};
""")
await connection.close()
if "searchall" in options:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
if word in row["Text"].lower():
if row in results:
pass
else:
results.append(row)
elif "content" in options:
for row in rows:
for word in words:
if word in row["Text"].lower():
results.append(row)
else:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
return results[:limit], len(results)
async def read(articleid=True, uid=False):
"""Returns the article with the matching ID.
If the input is invalid or the article is not found, empty list is returned."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if uid:
connection = await connect()
row = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "UID" = $1;
""", str(uid))
await connection.close()
return row
try:
articleid = int(articleid)
except ValueError:
return []
connection = await connect()
rows = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "ID" = $1;
""", articleid)
await connection.close()
result = []
for row in rows:
row_dict = dict(row)
row_dict["dateReleased"] = row["dateReleased"].replace(year=(row["dateReleased"].year + GAME_YEAR_OFFSET))
result.append(row_dict)
return result
async def count(options):
"""Counts the amount of articles that fit the given conditions.
Options:
--title: Counts the amount of articles that contain a certain term in the title.
--content: Counts the amount of articles that contain a certain term only in their content.
--all: Counts the amount of articles that contain a certain term in either the title or the content.
--before: Counts the amount of articles before a given date. Format: YYYY-MM-DD
--after: Counts the amount of articles after a given date. Format | year = datetime.datetime.strptime(option[7:], "%Y-%m-%d").year
# Convert date to format stored table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[11:]
dateend = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
dateend = datetime.datetime.strptime(option[7:], "%Y-%m-%d")
options.append("before") | conditional_block |
articlesearch.py |
async def fetch_settings():
if not os.path.exists("Settings.json"):
async with aiohttp.ClientSession() as settings_session:
async with settings_session.get(
"https://raw.githubusercontent.com/HassanAbouelela/Galnet-Newsfeed/"
"4499a01e6b5a679b807e95697effafde02f8d5e0/python/Settings.json") as response:
if response.status == 200:
raw_json = json.loads(await response.read())
with open("Settings.json", "w+") as file:
json.dump(raw_json, file, indent=2)
with open("Settings.json") as file:
return json.load(file)
async def connect(host: str = "localhost", database: str = "postgres", user: str = "postgres",
port: int = None, password: str = None, passfile=None, ssl: bool = False, use_file: bool = True):
"""Connects to a database"""
if use_file:
# Load Settings
settings = await fetch_settings()
host = settings["host"]
database = settings["database"]
user = settings["user"]
passfile = settings["passfile"]
password = settings["password"]
ssl = settings["ssl"]
port = settings["port"]
return await asyncpg.connect(host=host, port=port, user=user, password=password, passfile=passfile,
database=database, ssl=ssl)
async def update():
"""Looks for new articles."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
async with aiohttp.ClientSession() as session:
async with session.get("https://community.elitedangerous.com/") as response:
html = Bs4(await response.text(), "html.parser")
connection = await connect()
uids = []
new_articles = set()
uid_records = await connection.fetch(f"""
SELECT "UID" FROM "{table}" ORDER BY "dateReleased" DESC LIMIT 50;
""")
for record in uid_records:
uids.append(record["UID"])
for entry in html.find_all("h3", {"class": "hiLite galnetNewsArticleTitle"}):
entry = entry.find("a").get("href")[re.search("^/galnet/uid/", entry.find("a").get("href")).end():]
if entry not in uids:
new_articles.add(entry)
added = []
for article in new_articles:
date_today = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
async with session.get(f"https://community.elitedangerous.com/galnet/uid/{article}") as response:
bs4 = Bs4(await response.text(), "html.parser")
entry = bs4.find("h3", {"class": "hiLite galnetNewsArticleTitle"})
# Article Content
entry_title = entry.get_text().strip().replace("'", "''")
if entry_title == "" or entry_title is None:
entry_title = "No Title Available"
text = unquote(bs4.find_all("p")[1].get_text().replace("'", "''"))
# Date info
date_article = bs4.find("p").get_text()
date_article = datetime.datetime.strptime(date_article, "%d %b %Y")
if date_article.year >= 3300:
date_article = date_article.replace(year=(date_article.year - GAME_YEAR_OFFSET))
added.append(article)
await connection.execute(f"""
INSERT INTO "{table}"("Title", "UID", "dateReleased", "dateAdded", "Text") VALUES (
$1, $2, $3, $4, $5);
""", entry_title, article, date_article, date_today, text)
await connection.close()
if len(new_articles) > 0:
return len(added), added
async def search(terms):
"""Searches the DB for given input.
Options:
--title: Searches only in the titles of the articles (default search mode)
--content: Searches only in the content of an article, and ignores the title
--searchall: Searches both title and content of an article
--searchreverse: Searches the DB from the oldest article
--limit: Returns only the latest results up to number given (default 5). Format: limit=XYZ
--limitall: Returns all results found
--before: Looks for articles that were written before a given date. Format: YYYY-MM-DD
--after: Looks for articles that were written after a given date. Format: YYYY-MM-DD
If both the --after & --before tags are given, the search is limited to the dates between both options."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if ";" in terms:
terms.replace(";", "")
return "You can't use ';' in your searches!"
terms = terms.split(" ")
options = []
words = []
results = []
limit = 5
searchorder = "DESC"
datebegin = "0000-00-00"
dateend = "4000-01-01"
# Separating Options and Search Terms
for item in terms:
if "--" in item[:2]:
option = item.replace("--", "")
if option == "limitall" or option == "listall":
limit = 10000000
elif "limit" in option:
try:
limit = int(option[6:])
except ValueError:
limit = 5
elif "before" in option:
year = datetime.datetime.strptime(option[7:], "%Y-%m-%d").year
# Convert date to format stored table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[11:]
dateend = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
dateend = datetime.datetime.strptime(option[7:], "%Y-%m-%d")
options.append("before")
elif "after" in option:
year = datetime.datetime.strptime(option[6:], "%Y-%m-%d").year
# Convert date to format stored in table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[10:]
datebegin = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
datebegin = datetime.datetime.strptime(option[6:], "%Y-%m-%d")
options.append("after")
elif option == "searchreverse":
searchorder = "ASC"
else:
options.append(option)
else:
words.append(item.lower())
# Searching
connection = await connect()
if "before" in options and "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" BETWEEN $1 AND $2
ORDER BY "dateReleased" {searchorder};
""", datebegin, dateend)
elif "before" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" < $1
ORDER BY "dateReleased" {searchorder};
""", dateend)
elif "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" > $1
ORDER BY "dateReleased" {searchorder};
""", datebegin)
else:
rows = await connection.fetch(f"""
SELECT * FROM "{table}" ORDER BY "dateReleased" {searchorder};
""")
await connection.close()
if "searchall" in options:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
if word in row["Text"].lower():
if row in results:
pass
else:
results.append(row)
elif "content" in options:
for row in rows:
for word in words:
if word in row["Text"].lower():
results.append(row)
else:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
return results[:limit], len(results)
async def read(articleid=True, uid=False):
"""Returns the article with the matching ID.
If the input is invalid or the article is not found, empty list is returned."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if uid:
connection = await connect()
row = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "UID" = $1;
""", str(uid))
await connection.close()
return row
try:
articleid = int(articleid)
except ValueError | old_settings = await fetch_settings()
try:
old = old_settings["version"]
except KeyError:
old = "1.0"
new_settings = await fetch_settings()
os.remove("Settings.json")
for key in old_settings.keys():
if key in new_settings.keys():
new_settings[key] = old_settings[key]
new_settings["previous version"] = old
with open("Settings.json", "w") as settings_file:
json.dump(new_settings, settings_file, indent=2) | identifier_body |
|
articlesearch.py | == 200:
raw_json = json.loads(await response.read())
with open("Settings.json", "w+") as file:
json.dump(raw_json, file, indent=2)
with open("Settings.json") as file:
return json.load(file)
async def connect(host: str = "localhost", database: str = "postgres", user: str = "postgres",
port: int = None, password: str = None, passfile=None, ssl: bool = False, use_file: bool = True):
"""Connects to a database"""
if use_file:
# Load Settings
settings = await fetch_settings()
host = settings["host"]
database = settings["database"]
user = settings["user"]
passfile = settings["passfile"]
password = settings["password"]
ssl = settings["ssl"]
port = settings["port"]
return await asyncpg.connect(host=host, port=port, user=user, password=password, passfile=passfile,
database=database, ssl=ssl)
async def | ():
"""Looks for new articles."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
async with aiohttp.ClientSession() as session:
async with session.get("https://community.elitedangerous.com/") as response:
html = Bs4(await response.text(), "html.parser")
connection = await connect()
uids = []
new_articles = set()
uid_records = await connection.fetch(f"""
SELECT "UID" FROM "{table}" ORDER BY "dateReleased" DESC LIMIT 50;
""")
for record in uid_records:
uids.append(record["UID"])
for entry in html.find_all("h3", {"class": "hiLite galnetNewsArticleTitle"}):
entry = entry.find("a").get("href")[re.search("^/galnet/uid/", entry.find("a").get("href")).end():]
if entry not in uids:
new_articles.add(entry)
added = []
for article in new_articles:
date_today = datetime.datetime.now()
async with aiohttp.ClientSession() as session:
async with session.get(f"https://community.elitedangerous.com/galnet/uid/{article}") as response:
bs4 = Bs4(await response.text(), "html.parser")
entry = bs4.find("h3", {"class": "hiLite galnetNewsArticleTitle"})
# Article Content
entry_title = entry.get_text().strip().replace("'", "''")
if entry_title == "" or entry_title is None:
entry_title = "No Title Available"
text = unquote(bs4.find_all("p")[1].get_text().replace("'", "''"))
# Date info
date_article = bs4.find("p").get_text()
date_article = datetime.datetime.strptime(date_article, "%d %b %Y")
if date_article.year >= 3300:
date_article = date_article.replace(year=(date_article.year - GAME_YEAR_OFFSET))
added.append(article)
await connection.execute(f"""
INSERT INTO "{table}"("Title", "UID", "dateReleased", "dateAdded", "Text") VALUES (
$1, $2, $3, $4, $5);
""", entry_title, article, date_article, date_today, text)
await connection.close()
if len(new_articles) > 0:
return len(added), added
async def search(terms):
"""Searches the DB for given input.
Options:
--title: Searches only in the titles of the articles (default search mode)
--content: Searches only in the content of an article, and ignores the title
--searchall: Searches both title and content of an article
--searchreverse: Searches the DB from the oldest article
--limit: Returns only the latest results up to number given (default 5). Format: limit=XYZ
--limitall: Returns all results found
--before: Looks for articles that were written before a given date. Format: YYYY-MM-DD
--after: Looks for articles that were written after a given date. Format: YYYY-MM-DD
If both the --after & --before tags are given, the search is limited to the dates between both options."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if ";" in terms:
terms.replace(";", "")
return "You can't use ';' in your searches!"
terms = terms.split(" ")
options = []
words = []
results = []
limit = 5
searchorder = "DESC"
datebegin = "0000-00-00"
dateend = "4000-01-01"
# Separating Options and Search Terms
for item in terms:
if "--" in item[:2]:
option = item.replace("--", "")
if option == "limitall" or option == "listall":
limit = 10000000
elif "limit" in option:
try:
limit = int(option[6:])
except ValueError:
limit = 5
elif "before" in option:
year = datetime.datetime.strptime(option[7:], "%Y-%m-%d").year
# Convert date to format stored table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[11:]
dateend = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
dateend = datetime.datetime.strptime(option[7:], "%Y-%m-%d")
options.append("before")
elif "after" in option:
year = datetime.datetime.strptime(option[6:], "%Y-%m-%d").year
# Convert date to format stored in table
if year >= 3300:
converted_year = str(year - GAME_YEAR_OFFSET) + option[10:]
datebegin = datetime.datetime.strptime(converted_year, "%Y-%m-%d")
else:
datebegin = datetime.datetime.strptime(option[6:], "%Y-%m-%d")
options.append("after")
elif option == "searchreverse":
searchorder = "ASC"
else:
options.append(option)
else:
words.append(item.lower())
# Searching
connection = await connect()
if "before" in options and "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" BETWEEN $1 AND $2
ORDER BY "dateReleased" {searchorder};
""", datebegin, dateend)
elif "before" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" < $1
ORDER BY "dateReleased" {searchorder};
""", dateend)
elif "after" in options:
rows = await connection.fetch(f"""
SELECT * FROM "{table}"
WHERE "dateReleased" > $1
ORDER BY "dateReleased" {searchorder};
""", datebegin)
else:
rows = await connection.fetch(f"""
SELECT * FROM "{table}" ORDER BY "dateReleased" {searchorder};
""")
await connection.close()
if "searchall" in options:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
if word in row["Text"].lower():
if row in results:
pass
else:
results.append(row)
elif "content" in options:
for row in rows:
for word in words:
if word in row["Text"].lower():
results.append(row)
else:
for row in rows:
for word in words:
if word in row["Title"].lower():
results.append(row)
return results[:limit], len(results)
async def read(articleid=True, uid=False):
"""Returns the article with the matching ID.
If the input is invalid or the article is not found, empty list is returned."""
# Load Settings
settings = await fetch_settings()
table = settings["table"]
if uid:
connection = await connect()
row = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "UID" = $1;
""", str(uid))
await connection.close()
return row
try:
articleid = int(articleid)
except ValueError:
return []
connection = await connect()
rows = await connection.fetch(f"""
SELECT * FROM "{table}" WHERE "ID" = $1;
""", articleid)
await connection.close()
result = []
for row in rows:
row_dict = dict(row)
row_dict["dateReleased"] = row["dateReleased"].replace(year=(row["dateReleased"].year + GAME_YEAR_OFFSET))
result.append(row_dict)
return result
async def count(options):
"""Counts the amount of articles that fit the given conditions.
Options:
--title: Counts the amount of articles that contain a certain term in the title.
--content: Counts the amount of articles that contain a certain term only in their content.
--all: Counts the amount of articles that contain a certain term in either the title or the content.
--before: Counts the amount of articles before a given date. Format: YYYY-MM-DD
--after: Counts the amount of articles after a given date. Format | update | identifier_name |
ppo_fetch_reach.py |
class ActorCritic(nn.Module):
'''
For continuous action space PG methods the actor returns a tensor whose length equals the number of actions;
that tensor is the mean, and together with a covariance it defines a Gaussian distribution to sample actions from.
The covariance is used to balance the exploration-exploitation problem.
'''
def __init__(self, state_dim, action_dim, action_std):
super(ActorCritic, self).__init__()
# action mean range -1 to 1
self.actor = nn.Sequential(
nn.Linear(state_dim, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256,256),
nn.ReLU(),
nn.Linear(256, action_dim),
nn.Tanh()
)
#the output of network is the mean of actions
# critic
self.critic = nn.Sequential(
nn.Linear(state_dim, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 1)
)
#torch.full returns torch_tensor of size action_dim and filled with value action_std*action_std
#action_var --> variance of the action
self.action_var = torch.full((action_dim,), action_std*action_std).to(device)
def forward(self):
raise NotImplementedError
def act(self, state, memory):
#
action_mean = self.actor(state)
#the covariance is 2D diagonal array of action variance
#torch.diagonal --> since action_var is one_dim tensor torch.diag returns 2D array with tensor's values as main diagonal
cov_mat = torch.diag(self.action_var).to(device)
#sample actions from Gaussian Distribution with mean = action_mean, and covariance = action variance, cov_mat
dist = MultivariateNormal(action_mean, cov_mat)
#sample action
action = dist.sample()
action_logprob = dist.log_prob(action)
memory.states.append(state)
memory.actions.append(action)
memory.logprobs.append(action_logprob)
return action.detach()
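# Minimal sketch of the diagonal-Gaussian sampling used above (illustrative values only):
#   mean = torch.zeros(4)                      # actor output for a 4-dim action
#   cov = torch.diag(torch.full((4,), 0.25))   # action_std = 0.5 -> variance 0.25
#   dist = MultivariateNormal(mean, cov)
#   a = dist.sample()                          # one action vector
#   logp = dist.log_prob(a)                    # scalar log-density stored in memory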
def evaluate(self, state, action):
action_mean = self.actor(state)
action_var = self.action_var.expand_as(action_mean)
#torch.diag_embed returns a 2D diagonal array with the tensor's elements as the main diagonal
cov_mat = torch.diag_embed(action_var).to(device)
dist = MultivariateNormal(action_mean, cov_mat)
# these are log-probabilities of Pi(a|s), not raw values
action_logprobs = dist.log_prob(action)
dist_entropy = dist.entropy()
state_value = self.critic(state)
return action_logprobs, torch.squeeze(state_value), dist_entropy
class PPO:
def __init__(self, state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip):
self.lr = lr
self.betas = betas
self.gamma = gamma
self.eps_clip = eps_clip
self.K_epochs = K_epochs
self.policy = ActorCritic(state_dim, action_dim, action_std).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)
self.policy_old = ActorCritic(state_dim, action_dim, action_std).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())
self.MseLoss = nn.MSELoss()
def select_action(self, state, memory):
#state.reshape --> flatten the state into a single-row vector of a length compatible with the original state
state = torch.FloatTensor(state.reshape(1, -1)).to(device)
return self.policy_old.act(state, memory).cpu().data.numpy().flatten()
def update(self, memory):
# Monte Carlo estimate of rewards:
rewards = []
discounted_reward = 0
#loop through all episodes in the epoch
for reward, is_terminal in zip(reversed(memory.rewards), reversed(memory.is_terminals)):
if is_terminal:
discounted_reward = 0
discounted_reward = reward + (self.gamma * discounted_reward)
#insert the new discounted reward - after completing the episode - in index number 0 and push old ones
rewards.insert(0, discounted_reward)
# Normalizing the rewards:
rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
# convert list to tensor
old_states = torch.squeeze(torch.stack(memory.states).to(device), 1).detach()
old_actions = torch.squeeze(torch.stack(memory.actions).to(device), 1).detach()
old_logprobs = torch.squeeze(torch.stack(memory.logprobs), 1).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
#exp(log(prob)) = prob
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1-self.eps_clip, 1+self.eps_clip) * advantages
#total_loss_function = l_clip, loss_critic, loss_entropy
loss = -torch.min(surr1, surr2) + 0.5*self.MseLoss(state_values, rewards) - 0.01*dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
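# Worked sketch of the clipped objective computed above (made-up numbers):
#   with eps_clip = 0.2, advantage A = 1.0 and ratio r = exp(logp_new - logp_old) = 1.5,
#   surr1 = 1.5 * 1.0 = 1.5, surr2 = clip(1.5, 0.8, 1.2) * 1.0 = 1.2,
#   so -min(surr1, surr2) = -1.2 -- the clip keeps the policy step conservative.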
epoch_success_rate = []
clip_obs = 100
def _preproc_og( o, g):
#(o - o.mean())/o.std()
o = np.clip(o, -clip_obs, clip_obs)
#(g-g.mean())/g.std()
g = np.clip(g, -clip_obs, clip_obs)
return o, g
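# Quick sketch of the clipping above (illustrative):
#   o = np.array([-250., 3., 180.])
#   np.clip(o, -clip_obs, clip_obs)   # -> [-100., 3., 100.] with clip_obs = 100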
def launch():
############## Hyperparameters ##############
env_name = "HandManipulateEggFull-v0"
render = False
solved_reward = 300 # stop training if avg_reward > solved_reward
log_interval = 20 # print avg reward in the interval
max_episodes = 800 # max training episodes
max_timesteps = 1500 # max timesteps in one episode
update_timestep = 400 # update policy every n timesteps
action_std = 0.5 # constant std for action distribution (Multivariate Normal)
K_epochs = 40 # update policy for K epochs
eps_clip = 0.24 # clip parameter for PPO
gamma = 0.99 # discount factor
lr = 0.00101 # parameters for Adam optimizer
betas = (0.9, 0.999)
random_seed = 123
#############################################
# creating environment
env = gym.make(env_name)
state_dim_pre = env.observation_space['observation'].shape[0]
goal_dim = env.observation_space['desired_goal'].shape[0]
state_dim = state_dim_pre + goal_dim
action_dim = env.action_space.shape[0]
env.seed(123)
np.random.seed(123)
torch.manual_seed(123)
'''
if random_seed:
print("Random Seed: {}".format(random_seed))
torch.manual_seed(random_seed)
env.seed(random_seed)
np.random.seed(random_seed)
'''
memory = Memory()
ppo = PPO(state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip)
print(lr,betas)
# logging variables
running_reward = 0
avg_length = 0
time_step = 0
# training loop
for epoch in range(350):
running_reward = 0
avg_length = 0
time_step = 0
for i_episode in range(1, max_episodes+1):
episode_success_rate = []
state_ = env.reset()
env.env.reward_type = 'dense'
obs = state_['observation']
g = state_['desired_goal']
#clip the observation and goal into range [-clip_obs, clip_obs] (disabled here)
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
local_timestep = | def __init__(self):
self.actions = []
#in our case self.inputs ==> input state//goal
self.states = []
self.logprobs = []
self.rewards = []
self.is_terminals = []
def clear_memory(self):
#unlike DDPG (off-policy), PPO is on-policy, so data from older epochs must be discarded
del self.actions[:]
del self.states[:]
del self.logprobs[:]
del self.rewards[:]
del self.is_terminals[:] | identifier_body |
|
ppo_fetch_reach.py | (reversed(memory.rewards), reversed(memory.is_terminals)):
if is_terminal:
discounted_reward = 0
discounted_reward = reward + (self.gamma * discounted_reward)
#insert the new discounted reward - after completing the episode - in index number 0 and push old ones
rewards.insert(0, discounted_reward)
# Normalizing the rewards:
rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
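# Worked example of the discounted-return recursion above (made-up rewards):
#   rewards [1, 0, 2] with gamma = 0.9, walking backwards:
#   G3 = 2, G2 = 0 + 0.9*2 = 1.8, G1 = 1 + 0.9*1.8 = 2.62 -> list [2.62, 1.8, 2]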
# convert list to tensor
old_states = torch.squeeze(torch.stack(memory.states).to(device), 1).detach()
old_actions = torch.squeeze(torch.stack(memory.actions).to(device), 1).detach()
old_logprobs = torch.squeeze(torch.stack(memory.logprobs), 1).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
#exp(log(prob)) = prob
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1-self.eps_clip, 1+self.eps_clip) * advantages
#total_loss_function = l_clip, loss_critic, loss_entropy
loss = -torch.min(surr1, surr2) + 0.5*self.MseLoss(state_values, rewards) - 0.01*dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
epoch_success_rate = []
clip_obs = 100
def _preproc_og( o, g):
#(o - o.mean())/o.std()
o = np.clip(o, -clip_obs, clip_obs)
#(g-g.mean())/g.std()
g = np.clip(g, -clip_obs, clip_obs)
return o, g
def launch():
############## Hyperparameters ##############
env_name = "HandManipulateEggFull-v0"
render = False
solved_reward = 300 # stop training if avg_reward > solved_reward
log_interval = 20 # print avg reward in the interval
max_episodes = 800 # max training episodes
max_timesteps = 1500 # max timesteps in one episode
update_timestep = 400 # update policy every n timesteps
action_std = 0.5 # constant std for action distribution (Multivariate Normal)
K_epochs = 40 # update policy for K epochs
eps_clip = 0.24 # clip parameter for PPO
gamma = 0.99 # discount factor
lr = 0.00101 # parameters for Adam optimizer
betas = (0.9, 0.999)
random_seed = 123
#############################################
# creating environment
env = gym.make(env_name)
state_dim_pre = env.observation_space['observation'].shape[0]
goal_dim = env.observation_space['desired_goal'].shape[0]
state_dim = state_dim_pre + goal_dim
action_dim = env.action_space.shape[0]
env.seed(123)
np.random.seed(123)
torch.manual_seed(123)
'''
if random_seed:
print("Random Seed: {}".format(random_seed))
torch.manual_seed(random_seed)
env.seed(random_seed)
np.random.seed(random_seed)
'''
memory = Memory()
ppo = PPO(state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip)
print(lr,betas)
# logging variables
running_reward = 0
avg_length = 0
time_step = 0
# training loop
for epoch in range(350):
running_reward = 0
avg_length = 0
time_step = 0
for i_episode in range(1, max_episodes+1):
episode_success_rate = []
state_ = env.reset()
env.env.reward_type = 'dense'
obs = state_['observation']
g = state_['desired_goal']
#clip the observation and goal into range [-clip_obs, clip_obs] (disabled here)
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
local_timestep = 0
for t in range(env._max_episode_steps):
local_timestep += 1
time_step +=1
# Running policy_old:
action = ppo.select_action(state, memory)
state_, reward, done, info = env.step(action)
#episode_success_rate.append(info['is_success'])
#env.render()
obs = state_['observation']
g = state_['desired_goal']
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
# Saving reward and is_terminals:
memory.rewards.append(reward)
memory.is_terminals.append(done)
# update if its time
#to go below
running_reward += reward
if done :
break
if local_timestep<49:
print('Goaaaaaaaaaaaaaaaal')
#episode_success_rate = np.array(episode_success_rate)
#episode_success_rate = np.mean(episode_success_rate)
avg_length += t
# to go up from here ------>
# stop training if avg_reward > solved_reward
if running_reward > (log_interval*solved_reward):
torch.save(ppo.policy.state_dict(), '/home/muhyahiarl/ppo_grad_project/PPO_continuous_{}.pth'.format(env_name))
print("########## Solved! ##########")
break
# save every 400 episodes
if i_episode % 400 == 0:
torch.save(ppo.policy.state_dict(), '/home/muhyahiarl/ppo_grad_project/PPO_continuous_{}.pth'.format(env_name))
print('updated')
# logging
'''
if i_episode % log_interval == 0:
avg_length = int(avg_length/log_interval)
running_reward = int((running_reward/log_interval))
print('Episode {} \t Avg length: {} \t Avg reward: {}'.format(i_episode, avg_length, running_reward))
running_reward = 0
avg_length = 0
'''
# until here <----
#
# move the update block below up to here |||||
#
#
#
print('reach here_0')
ppo.update(memory)
print('reach here_1')
#memory.clear_memory()
time_step = 0
state_ = env.reset()
env.env.reward_type = 'dense'
print('reach here_2')
obs = state_['observation']
g = state_['desired_goal']
state = np.concatenate([obs,g])
local_timestep = 0
test_success_rate = []
for _ in range(10):
local_success_rate = []
state_ = env.reset()
state = np.concatenate([state_['observation'], state_['desired_goal']])
for t in range(env._max_episode_steps):
local_timestep += 1
time_step +=1
# Running policy_old:
action = ppo.select_action(state, memory)
state_, reward, done, info = env.step(action)
obs = state_['observation']
g = state_['desired_goal']
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs ,g])
local_success_rate.append(info['is_success'])
if done:
break
local_success_rate = np.array(local_success_rate)
test_success_rate.append(np.mean(local_success_rate))
local_test_success = np.mean(np.array(test_success_rate))
epoch_success_rate.append(local_test_success)
print('epoch ' +str(epoch) +' success rate is ',local_test_success)
memory.clear_memory()
avg_length += t
def _eval_agent():
test_success_rate = []
for _ in range(10):
| local_success_rate = []
state_ = env.reset()
state = np.concatenate([state_['observation'], state_['desired_goal']])
for t in range(env._max_episode_steps):
local_timestep += 1
time_step +=1
# Running policy_old:
action = ppo.select_action(state, memory)
state_, reward, done, info = env.step(action)
obs = state_['observation']
g = state_['desired_goal']
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs ,g])
local_success_rate.append(info['is_success'])
if done:
break
local_success_rate = np.array(local_success_rate)
test_success_rate.append(np.mean(local_success_rate)) | conditional_block |
|
ppo_fetch_reach.py | anh()
)
#the output of network is the mean of actions
# critic
self.critic = nn.Sequential(
nn.Linear(state_dim, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 1)
)
#torch.full returns a tensor of size action_dim filled with the value action_std*action_std
#action_var --> variance of the action
self.action_var = torch.full((action_dim,), action_std*action_std).to(device)
def forward(self):
raise NotImplementedError
def act(self, state, memory):
#
action_mean = self.actor(state)
#the covariance is 2D diagonal array of action variance
#torch.diag --> since action_var is a one-dim tensor, torch.diag returns a 2D array with the tensor's values on the main diagonal
cov_mat = torch.diag(self.action_var).to(device)
#sample actions from Gaussian Distribution with mean = action_mean, and covariance = action variance, cov_mat
dist = MultivariateNormal(action_mean, cov_mat)
#sample action
action = dist.sample()
action_logprob = dist.log_prob(action)
memory.states.append(state)
memory.actions.append(action)
memory.logprobs.append(action_logprob)
return action.detach()
def evaluate(self, state, action):
action_mean = self.actor(state)
action_var = self.action_var.expand_as(action_mean)
#torch.diag_embed returns a 2D diagonal array with the tensor's elements on the main diagonal
cov_mat = torch.diag_embed(action_var).to(device)
dist = MultivariateNormal(action_mean, cov_mat)
# these are log-probabilities, not values: log Pi(a|s)
action_logprobs = dist.log_prob(action)
dist_entropy = dist.entropy()
state_value = self.critic(state)
return action_logprobs, torch.squeeze(state_value), dist_entropy
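# --- Editor's note: illustrative sketch, not part of the original script. ---
# The act()/evaluate() methods above build a diagonal covariance from a single action_std and
# sample from a MultivariateNormal. The small helper below shows the same mechanics on made-up
# shapes; the name _demo_diagonal_gaussian is hypothetical and nothing in this file calls it.
def _demo_diagonal_gaussian(action_dim=4, action_std=0.5):
    import torch
    from torch.distributions import MultivariateNormal
    action_mean = torch.zeros(action_dim)                         # stand-in for self.actor(state)
    action_var = torch.full((action_dim,), action_std * action_std)
    cov_mat = torch.diag(action_var)                              # (action_dim, action_dim) diagonal covariance
    dist = MultivariateNormal(action_mean, cov_mat)
    action = dist.sample()                                        # one sampled action vector
    log_prob = dist.log_prob(action)                              # scalar log pi(a|s)
    return action, log_prob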
class PPO:
def __init__(self, state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip):
self.lr = lr
self.betas = betas
self.gamma = gamma
self.eps_clip = eps_clip
self.K_epochs = K_epochs
self.policy = ActorCritic(state_dim, action_dim, action_std).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)
self.policy_old = ActorCritic(state_dim, action_dim, action_std).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())
self.MseLoss = nn.MSELoss()
def select_action(self, state, memory):
#state.reshape --> flatten the state into a single-row vector with the same length as the original state
state = torch.FloatTensor(state.reshape(1, -1)).to(device)
return self.policy_old.act(state, memory).cpu().data.numpy().flatten()
def update(self, memory):
# Monte Carlo estimate of rewards:
rewards = []
discounted_reward = 0
#loop through all episodes in the epoch
for reward, is_terminal in zip(reversed(memory.rewards), reversed(memory.is_terminals)):
if is_terminal:
discounted_reward = 0
discounted_reward = reward + (self.gamma * discounted_reward)
#insert the new discounted reward - after completing the episode - in index number 0 and push old ones
rewards.insert(0, discounted_reward) | rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
# convert list to tensor
old_states = torch.squeeze(torch.stack(memory.states).to(device), 1).detach()
old_actions = torch.squeeze(torch.stack(memory.actions).to(device), 1).detach()
old_logprobs = torch.squeeze(torch.stack(memory.logprobs), 1).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
#exp(log(prob)) = prob
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1-self.eps_clip, 1+self.eps_clip) * advantages
#total_loss_function = l_clip, loss_critic, loss_entropy
loss = -torch.min(surr1, surr2) + 0.5*self.MseLoss(state_values, rewards) - 0.01*dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
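# --- Editor's note: illustrative sketch, not part of the original script. ---
# Tiny worked example of the clipped surrogate used in PPO.update(): with eps_clip = 0.2 and a
# positive advantage, a probability ratio of 1.5 is clipped to 1.2, which caps how far a single
# favourable sample can push the policy. The helper name is hypothetical and nothing calls it.
def _demo_clipped_surrogate(eps_clip=0.2):
    import torch
    ratios = torch.tensor([0.5, 1.0, 1.5])        # pi_theta(a|s) / pi_theta_old(a|s)
    advantages = torch.tensor([1.0, 1.0, 1.0])    # pretend every advantage is +1
    surr1 = ratios * advantages                   # [0.5, 1.0, 1.5]
    surr2 = torch.clamp(ratios, 1 - eps_clip, 1 + eps_clip) * advantages   # [0.8, 1.0, 1.2]
    return torch.min(surr1, surr2)                # [0.5, 1.0, 1.2] -> the 1.5 ratio is clipped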
epoch_success_rate = []
clip_obs = 100
def _preproc_og( o, g):
#(o - o.mean())/o.std()
o = np.clip(o, -clip_obs, clip_obs)
#(g-g.mean())/g.std()
g = np.clip(g, -clip_obs, clip_obs)
return o, g
def launch():
############## Hyperparameters ##############
env_name = "HandManipulateEggFull-v0"
render = False
solved_reward = 300 # stop training if avg_reward > solved_reward
log_interval = 20 # print avg reward in the interval
max_episodes = 800 # max training episodes
max_timesteps = 1500 # max timesteps in one episode
update_timestep = 400 # update policy every n timesteps
action_std = 0.5 # constant std for action distribution (Multivariate Normal)
K_epochs = 40 # update policy for K epochs
eps_clip = 0.24 # clip parameter for PPO
gamma = 0.99 # discount factor
lr = 0.00101 # parameters for Adam optimizer
betas = (0.9, 0.999)
random_seed = 123
#############################################
# creating environment
env = gym.make(env_name)
state_dim_pre = env.observation_space['observation'].shape[0]
goal_dim = env.observation_space['desired_goal'].shape[0]
state_dim = state_dim_pre + goal_dim
action_dim = env.action_space.shape[0]
env.seed(123)
np.random.seed(123)
torch.manual_seed(123)
'''
if random_seed:
print("Random Seed: {}".format(random_seed))
torch.manual_seed(random_seed)
env.seed(random_seed)
np.random.seed(random_seed)
'''
memory = Memory()
ppo = PPO(state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip)
print(lr,betas)
# logging variables
running_reward = 0
avg_length = 0
time_step = 0
# training loop
for epoch in range(350):
running_reward = 0
avg_length = 0
time_step = 0
for i_episode in range(1, max_episodes+1):
episode_success_rate = []
state_ = env.reset()
env.env.reward_type = 'dense'
obs = state_['observation']
g = state_['desired_goal']
#clip the observation and goal into the range [-clip_obs, clip_obs] (see _preproc_og)
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
local_timestep = 0
for t in range(env._max_episode_steps):
local_timestep += 1
time_step +=1
# Running policy_old:
action = ppo.select_action(state, memory)
state_, reward, done, info = env.step(action)
#episode_success_rate.append(info['is_success'])
#env.render()
obs = state_['observation']
g = state_['desired_goal']
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
# Saving reward and is_terminals:
memory.rewards.append(reward)
memory.is_terminals.append(done)
# update if it's time
# (to be moved below)
running_reward += reward
if done :
break
if local_timestep<49:
print('Goaaaaaaaaaaaaaaaal')
#episode_success_rate = np.array(episode_success_rate)
#episode_success_rate = np.mean(episode_success_rate)
avg_length += t
# to go up from here ------>
# stop training if avg_reward > solved_reward
if running_reward > (log_interval*solved_reward):
torch.save(ppo.policy.state_dict(), '/home/muhyahiarl/ppo_grad_project |
# Normalizing the rewards: | random_line_split |
ppo_fetch_reach.py | :
def __init__(self):
self.actions = []
#in our case self.inputs ==> input state//goal
self.states = []
self.logprobs = []
self.rewards = []
self.is_terminals = []
def clear_memory(self):
#in ddpg you can learn from data stored from older epochs
del self.actions[:]
del self.states[:]
del self.logprobs[:]
del self.rewards[:]
del self.is_terminals[:]
class ActorCritic(nn.Module):
'''
In continuous action space PG methods the actor returns a tensor of length action_dim.
Those values are the mean of a Gaussian distribution used to sample from the action space;
the covariance is used to balance the exploration-exploitation trade-off.
'''
def __init__(self, state_dim, action_dim, action_std):
super(ActorCritic, self).__init__()
# action mean range -1 to 1
self.actor = nn.Sequential(
nn.Linear(state_dim, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256,256),
nn.ReLU(),
nn.Linear(256, action_dim),
nn.Tanh()
)
#the output of network is the mean of actions
# critic
self.critic = nn.Sequential(
nn.Linear(state_dim, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 256),
nn.ReLU(),
nn.Linear(256, 1)
)
#torch.full returns a tensor of size action_dim filled with the value action_std*action_std
#action_var --> variance of the action
self.action_var = torch.full((action_dim,), action_std*action_std).to(device)
def forward(self):
raise NotImplementedError
def act(self, state, memory):
#
action_mean = self.actor(state)
#the covariance is 2D diagonal array of action variance
#torch.diag --> since action_var is a one-dim tensor, torch.diag returns a 2D array with the tensor's values on the main diagonal
cov_mat = torch.diag(self.action_var).to(device)
#sample actions from Gaussian Distribution with mean = action_mean, and covariance = action variance, cov_mat
dist = MultivariateNormal(action_mean, cov_mat)
#sample action
action = dist.sample()
action_logprob = dist.log_prob(action)
memory.states.append(state)
memory.actions.append(action)
memory.logprobs.append(action_logprob)
return action.detach()
def evaluate(self, state, action):
action_mean = self.actor(state)
action_var = self.action_var.expand_as(action_mean)
#torch.diag_embed returns a 2D diagonal array with the tensor's elements on the main diagonal
cov_mat = torch.diag_embed(action_var).to(device)
dist = MultivariateNormal(action_mean, cov_mat)
# these are log-probabilities, not values: log Pi(a|s)
action_logprobs = dist.log_prob(action)
dist_entropy = dist.entropy()
state_value = self.critic(state)
return action_logprobs, torch.squeeze(state_value), dist_entropy
class PPO:
def __init__(self, state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip):
self.lr = lr
self.betas = betas
self.gamma = gamma
self.eps_clip = eps_clip
self.K_epochs = K_epochs
self.policy = ActorCritic(state_dim, action_dim, action_std).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)
self.policy_old = ActorCritic(state_dim, action_dim, action_std).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())
self.MseLoss = nn.MSELoss()
def select_action(self, state, memory):
#state.reshape --> flatten the state into a single-row vector with the same length as the original state
state = torch.FloatTensor(state.reshape(1, -1)).to(device)
return self.policy_old.act(state, memory).cpu().data.numpy().flatten()
def update(self, memory):
# Monte Carlo estimate of rewards:
rewards = []
discounted_reward = 0
#loop through all episodes in the epoch
for reward, is_terminal in zip(reversed(memory.rewards), reversed(memory.is_terminals)):
if is_terminal:
discounted_reward = 0
discounted_reward = reward + (self.gamma * discounted_reward)
#insert the new discounted reward - after completing the episode - in index number 0 and push old ones
rewards.insert(0, discounted_reward)
# Normalizing the rewards:
rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
# convert list to tensor
old_states = torch.squeeze(torch.stack(memory.states).to(device), 1).detach()
old_actions = torch.squeeze(torch.stack(memory.actions).to(device), 1).detach()
old_logprobs = torch.squeeze(torch.stack(memory.logprobs), 1).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
#exp(log(prob)) = prob
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1-self.eps_clip, 1+self.eps_clip) * advantages
#total_loss_function = l_clip, loss_critic, loss_entropy
loss = -torch.min(surr1, surr2) + 0.5*self.MseLoss(state_values, rewards) - 0.01*dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
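# --- Editor's note: illustrative sketch, not part of the original script. ---
# The reversed loop in update() above builds Monte Carlo returns back to front, resetting the
# running sum whenever is_terminal is True so returns never leak across episodes. For rewards
# [1, 0, 2] and gamma = 0.99 it yields [2.9602, 1.98, 2.0]. The helper name is hypothetical.
def _demo_discounted_returns(rewards=(1.0, 0.0, 2.0), gamma=0.99):
    returns, running = [], 0.0
    for r in reversed(rewards):
        running = r + gamma * running
        returns.insert(0, running)
    return returns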
epoch_success_rate = []
clip_obs = 100
def _preproc_og( o, g):
#(o - o.mean())/o.std()
o = np.clip(o, -clip_obs, clip_obs)
#(g-g.mean())/g.std()
g = np.clip(g, -clip_obs, clip_obs)
return o, g
def launch():
############## Hyperparameters ##############
env_name = "HandManipulateEggFull-v0"
render = False
solved_reward = 300 # stop training if avg_reward > solved_reward
log_interval = 20 # print avg reward in the interval
max_episodes = 800 # max training episodes
max_timesteps = 1500 # max timesteps in one episode
update_timestep = 400 # update policy every n timesteps
action_std = 0.5 # constant std for action distribution (Multivariate Normal)
K_epochs = 40 # update policy for K epochs
eps_clip = 0.24 # clip parameter for PPO
gamma = 0.99 # discount factor
lr = 0.00101 # parameters for Adam optimizer
betas = (0.9, 0.999)
random_seed = 123
#############################################
# creating environment
env = gym.make(env_name)
state_dim_pre = env.observation_space['observation'].shape[0]
goal_dim = env.observation_space['desired_goal'].shape[0]
state_dim = state_dim_pre + goal_dim
action_dim = env.action_space.shape[0]
env.seed(123)
np.random.seed(123)
torch.manual_seed(123)
'''
if random_seed:
print("Random Seed: {}".format(random_seed))
torch.manual_seed(random_seed)
env.seed(random_seed)
np.random.seed(random_seed)
'''
memory = Memory()
ppo = PPO(state_dim, action_dim, action_std, lr, betas, gamma, K_epochs, eps_clip)
print(lr,betas)
# logging variables
running_reward = 0
avg_length = 0
time_step = 0
# training loop
for epoch in range(350):
running_reward = 0
avg_length = 0
time_step = 0
for i_episode in range(1, max_episodes+1):
episode_success_rate = []
state_ = env.reset()
env.env.reward_type = 'dense'
obs = state_['observation']
g = state_['desired_goal']
#clip the observation and goal into the range [-clip_obs, clip_obs] (see _preproc_og)
#obs, g = _preproc_og(obs, g)
state = np.concatenate([obs,g])
local | Memory | identifier_name |
|
bot.js |
}else if(global.dictmode == 1){
var res = request.split(' ').map(e=>closest.request(e)).filter(e=>e.query != null && e.answer != 'false')
if(res.length == 0) return false
else return res[0].answer
}
}
};
const bot = {
lpevent, msgevent, amsgevent, dict, VK, api, colors,
status(newstatus){
api.call('status.set', {text: newstatus})
},
authid(){return global.authid},
botname(){return global.botname},
version: {
codenum: greet[1].match(/v\d.\d.\d\s\S\w*/g),
num: greet[1].match(/v\d.\d.\d/g)
},
getArgs(text, isConv){
try{
if(isConv){
return text.split(' ').slice(2)
}
else{
return text.split(' ').slice(1)
}
}catch(e){
return [text];
}
},
getAnswer(peer){return '[id'+peer.s.id+'|'+peer.s.fname+'], ';},
init(opts){
if(opts.rucaptcha){o = opts.rucaptcha}else{o = null}
api.init(opts.token, o)
closest.setlow(opts.dictmatch || 1)
global.dictmode = opts.dictmode || 0
closest.setlow('false');
global.botname = opts.botname.map(e => e.toLowerCase());
//Authorize user
console.log( | и об аккаунте...`);
api.call('users.get', {}).then(res =>{
authid = res[0].id
console.log(`[${il.ts()} | init]`.green,`Успешно авторизовано как ${colors.green(res[0].first_name+' '+res[0].last_name+' (ID: '+res[0].id+')')}`);
});
//Set status
api.call('status.set', {text: opts.status ? opts.status : 'sBot '+bot.version.codenum, group_id: 0});
//Load cache
console.log(`[${il.ts()} | init]`.green,`Загрузка кэша пользователей...`);
fs.access('cache.json', fs.constants.F_OK, function(err){
if(err){
fs.writeFile('cache.json', '{\n}', function(err){
if(err){console.log(`[${il.ts()} | init]`.red,`Ошибка при создании файла кэша`)}
else{console.log(`[${il.ts()} | init]`.green,`Создан файл кэша`); global.cache = {};}})
}else{
fs.readFile('cache.json', 'utf-8', function(err, file){
try{global.cache = JSON.parse(file);
console.log(`[${il.ts()} | init]`.green,`Загружено ${colors.green(Object.keys(global.cache).length+' пользователей')} из кэша`);
}catch(e){
console.log(`[${il.ts()} | init] Файл кэша поврежден, идёт перезапись...`.red);
fs.writeFile('cache.json', '{\n}', function(err){if(err){console.log(`[${il.ts()} | init]`.red,'Ошибка при перезаписи файла кэша')}
else{console.log(`[${il.ts()} | init]`.green,'Файл кэша успешно перезаписан'); global.cache = {};}})
}
});
}
});
//Initialize LongPoll connection
api.call('messages.getLongPollServer', {use_ssl: 1}).then(res => {
console.log(`[${il.ts()} | init]`.green,`Запуск цикла LongPoll...`);il.longpollLoop(res);});
//Online loop
console.log(`[${il.ts()} | init]`.green,`Запуск цикла установки онлайна...`);
api.call('account.setOnline', {});
setInterval(function(){api.call('account.setOnline')}, 270000);
},
sendmsg(type, id, msg, attach, sid, callback){
if (type == 'conv') {id = id + 2000000000}
if (attach) {obj = {peer_id: id, message: msg, attachment: attach}}
else if(sid) {obj = {peer_id: id, sticker_id: sid}}
else {obj = {peer_id: id, message: msg}}
api.call('messages.send', obj)
.then(res => {
if(res.error){
switch(res.error.error_code){
case 902:
console.log(`[${il.ts()} | msg] Ошибка при отправке: Запрещено настройками приватности`.red);break;
case 900:
console.log(`[${il.ts()} | msg] Ошибка при отправке: Добавлен в чёрный список`.red);break;
default:
console.log(`[${il.ts()} | msg] Ошибка при отправке: ${res.error.error_msg}`.red);break;
}
}
else{
console.log(`[${il.ts()} | msg =>] ${sid ? '(Стикер #'+sid+')' : il.msgmask(id, msg)}`.cyan);
sentids.push(res)
if(callback) callback()
}
})
},
use(callback){
global.use = callback;
}
};
module.exports = bot;
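// --- Editor's note: hypothetical usage sketch, not from the original repository. ---
// Option names mirror what init() above actually reads; the token value is a placeholder.
// const bot = require('./bot.js');
// bot.init({
//   token: 'VK_USER_ACCESS_TOKEN',   // user access token passed to api.init()
//   botname: ['бот', 'bot'],         // names the bot responds to in conversations
//   dictmode: 0,                     // 0: match the whole phrase, 1: match word by word
//   dictmatch: 1,                    // fuzzy-match threshold forwarded to closest.setlow()
//   status: 'online'                 // optional status text
// });
// bot.msgevent.on('ping', peer => peer.answer('pong'));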
const il = {
msgmask(id,msg){return id+': '+msg;},
ts(){return moment().format('DD.MM.YY HH:mm:ss')},
lpmask(obj){
if(obj.type == 'conv'){return `[${obj.name} | ${obj.id}] ${obj.s.fname} ${obj.s.lname}: ${obj.msg.full}`}
else{return `[${obj.s.fname} ${obj.s.lname}, ${obj.id}]: ${obj.msg.full}`}
},
parselp(answer, callback){
function pu(userid){
return new Promise(function(resolve, reject){ // Parses user id and saves it to cache, or returns value from cache
if(global.cache[userid]){
resolve({id: userid, fname: global.cache[userid][0], lname: global.cache[userid][1]});
}else{
api.call('users.get', {user_ids: userid}).then(res => {
global.cache[userid] = [res[0].first_name, res[0].last_name];
console.log(`[${il.ts()} | cache] Пользователь ${userid} кэширован`.green);
fs.writeFile('cache.json', JSON.stringify(global.cache, null, 2), null);
resolve({id: userid, fname: res[0].first_name, lname: res[0].last_name});
})
}
})
}
answer.forEach(function(ans){
if(ans[7] && ans[7]['source_act']){
function truncate(id, nc){
return new Promise(function(resolve, reject){
if(nc){
Promise.all([api.call('users.get', {user_ids: id, name_case: nc}), pu(id)])
.then(res => {resolve({t: `${res[0][0].first_name} ${res[0][0].last_name}`,f: res[1]})})
}else{pu(id).then(usr => {resolve({t: `${usr.fname.charAt(0)}. ${usr.lname}`, f: usr})})}
})
}
switch(ans[7]['source_act']){
case 'chat_create': // Someone created chat
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} создал беседу "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_create', {name: ans[7]['source_text'], admin: name.f, peer_id: ans[3]});
})
break;
case 'chat_title_update': // Someone updated chat name
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил название беседы с "${ans[7]['source_old_text']}" на "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_title_update', {oldname: ans[7]['source_old_text'], newname: ans[7]['source_text'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_photo_update': // Someone updated chat photo
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил фото в беседе с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_photo_update', {photo: ans[7]['attach1'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_invite_user': // Someone invited user into chat
Promise.all([ | `[${il.ts()} | init]`.green,`Получение информаци | identifier_body |
bot.js |
}else if(global.dictmode == 1){
var res = request.split(' ').map(e=>closest.request(e)).filter(e=>e.query != null && e.answer != 'false')
if(res.length == 0) return false
else return res[0].answer
}
}
};
const bot = {
lpevent, msgevent, amsgevent, dict, VK, api, colors,
status(newstatus){
api.call('status.set', {text: newstatus})
},
authid(){return global.authid},
botname(){return global.botname},
version: {
codenum: greet[1].match(/v\d.\d.\d\s\S\w*/g),
num: greet[1].match(/v\d.\d.\d/g)
},
getArgs(text, isConv){
try{
if(isConv){
return text.split(' ').slice(2)
}
else{
return text.split(' ').slice(1)
}
}catch(e){
return [text];
}
},
getAnswer(peer){return '[id'+peer.s.id+'|'+peer.s.fname+'], ';},
init(opts){
if(opts.rucaptcha){o = opts.rucaptcha}else{o = null}
api.init(o | en, o)
closest.setlow(opts.dictmatch || 1)
global.dictmode = opts.dictmode || 0
closest.setlow('false');
global.botname = opts.botname.map(e => e.toLowerCase());
//Authorize user
console.log(`[${il.ts()} | init]`.green,`Получение информации об аккаунте...`);
api.call('users.get', {}).then(res =>{
authid = res[0].id
console.log(`[${il.ts()} | init]`.green,`Успешно авторизовано как ${colors.green(res[0].first_name+' '+res[0].last_name+' (ID: '+res[0].id+')')}`);
});
//Set status
api.call('status.set', {text: opts.status ? opts.status : 'sBot '+bot.version.codenum, group_id: 0});
//Load cache
console.log(`[${il.ts()} | init]`.green,`Загрузка кэша пользователей...`);
fs.access('cache.json', fs.constants.F_OK, function(err){
if(err){
fs.writeFile('cache.json', '{\n}', function(err){
if(err){console.log(`[${il.ts()} | init]`.red,`Ошибка при создании файла кэша`)}
else{console.log(`[${il.ts()} | init]`.green,`Создан файл кэша`); global.cache = {};}})
}else{
fs.readFile('cache.json', 'utf-8', function(err, file){
try{global.cache = JSON.parse(file);
console.log(`[${il.ts()} | init]`.green,`Загружено ${colors.green(Object.keys(global.cache).length+' пользователей')} из кэша`);
}catch(e){
console.log(`[${il.ts()} | init] Файл кэша поврежден, идёт перезапись...`.red);
fs.writeFile('cache.json', '{\n}', function(err){if(err){console.log(`[${il.ts()} | init]`.red,'Ошибка при перезаписи файла кэша')}
else{console.log(`[${il.ts()} | init]`.green,'Файл кэша успешно перезаписан'); global.cache = {};}})
}
});
}
});
//Initialize LongPoll connection
api.call('messages.getLongPollServer', {use_ssl: 1}).then(res => {
console.log(`[${il.ts()} | init]`.green,`Запуск цикла LongPoll...`);il.longpollLoop(res);});
//Online loop
console.log(`[${il.ts()} | init]`.green,`Запуск цикла установки онлайна...`);
api.call('account.setOnline', {});
setInterval(function(){api.call('account.setOnline')}, 270000);
},
sendmsg(type, id, msg, attach, sid, callback){
if (type == 'conv') {id = id + 2000000000}
if (attach) {obj = {peer_id: id, message: msg, attachment: attach}}
else if(sid) {obj = {peer_id: id, sticker_id: sid}}
else {obj = {peer_id: id, message: msg}}
api.call('messages.send', obj)
.then(res => {
if(res.error){
switch(res.error.error_code){
case 902:
console.log(`[${il.ts()} | msg] Ошибка при отправке: Запрещено настройками приватности`.red);break;
case 900:
console.log(`[${il.ts()} | msg] Ошибка при отправке: Добавлен в чёрный список`.red);break;
default:
console.log(`[${il.ts()} | msg] Ошибка при отправке: ${res.error.error_msg}`.red);break;
}
}
else{
console.log(`[${il.ts()} | msg =>] ${sid ? '(Стикер #'+sid+')' : il.msgmask(id, msg)}`.cyan);
sentids.push(res)
if(callback) callback()
}
})
},
use(callback){
global.use = callback;
}
};
module.exports = bot;
const il = {
msgmask(id,msg){return id+': '+msg;},
ts(){return moment().format('DD.MM.YY HH:mm:ss')},
lpmask(obj){
if(obj.type == 'conv'){return `[${obj.name} | ${obj.id}] ${obj.s.fname} ${obj.s.lname}: ${obj.msg.full}`}
else{return `[${obj.s.fname} ${obj.s.lname}, ${obj.id}]: ${obj.msg.full}`}
},
parselp(answer, callback){
function pu(userid){
return new Promise(function(resolve, reject){ // Parses user id and saves it to cache, or returns value from cache
if(global.cache[userid]){
resolve({id: userid, fname: global.cache[userid][0], lname: global.cache[userid][1]});
}else{
api.call('users.get', {user_ids: userid}).then(res => {
global.cache[userid] = [res[0].first_name, res[0].last_name];
console.log(`[${il.ts()} | cache] Пользователь ${userid} кэширован`.green);
fs.writeFile('cache.json', JSON.stringify(global.cache, null, 2), null);
resolve({id: userid, fname: res[0].first_name, lname: res[0].last_name});
})
}
})
}
answer.forEach(function(ans){
if(ans[7] && ans[7]['source_act']){
function truncate(id, nc){
return new Promise(function(resolve, reject){
if(nc){
Promise.all([api.call('users.get', {user_ids: id, name_case: nc}), pu(id)])
.then(res => {resolve({t: `${res[0][0].first_name} ${res[0][0].last_name}`,f: res[1]})})
}else{pu(id).then(usr => {resolve({t: `${usr.fname.charAt(0)}. ${usr.lname}`, f: usr})})}
})
}
switch(ans[7]['source_act']){
case 'chat_create': // Someone created chat
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} создал беседу "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_create', {name: ans[7]['source_text'], admin: name.f, peer_id: ans[3]});
})
break;
case 'chat_title_update': // Someone updated chat name
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил название беседы с "${ans[7]['source_old_text']}" на "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_title_update', {oldname: ans[7]['source_old_text'], newname: ans[7]['source_text'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_photo_update': // Someone updated chat photo
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил фото в беседе с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_photo_update', {photo: ans[7]['attach1'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_invite_user': // Someone invited user into chat
Promise.all([ | pts.tok | identifier_name |
bot.js | Пользователь ${userid} кэширован`.green);
fs.writeFile('cache.json', JSON.stringify(global.cache, null, 2), null);
resolve({id: userid, fname: res[0].first_name, lname: res[0].last_name});
})
}
})
}
answer.forEach(function(ans){
if(ans[7] && ans[7]['source_act']){
function truncate(id, nc){
return new Promise(function(resolve, reject){
if(nc){
Promise.all([api.call('users.get', {user_ids: id, name_case: nc}), pu(id)])
.then(res => {resolve({t: `${res[0][0].first_name} ${res[0][0].last_name}`,f: res[1]})})
}else{pu(id).then(usr => {resolve({t: `${usr.fname.charAt(0)}. ${usr.lname}`, f: usr})})}
})
}
switch(ans[7]['source_act']){
case 'chat_create': // Someone created chat
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} создал беседу "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_create', {name: ans[7]['source_text'], admin: name.f, peer_id: ans[3]});
})
break;
case 'chat_title_update': // Someone updated chat name
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил название беседы с "${ans[7]['source_old_text']}" на "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_title_update', {oldname: ans[7]['source_old_text'], newname: ans[7]['source_text'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_photo_update': // Someone updated chat photo
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил фото в беседе с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_photo_update', {photo: ans[7]['attach1'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_invite_user': // Someone invited user into chat
Promise.all([truncate(ans[7].from), truncate(ans[7].source_mid, 'gen')])
.then(n =>{
console.log(`[${il.ts()} | lpevent]`.inverse, `${n[0].t} пригласил ${n[1].t} в беседу с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_invite_user', {inviter: n[0].f, invited: n[1].f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_kick_user': // Someone kicked user from chat
Promise.all([truncate(ans[7].from), truncate(ans[7].source_mid, 'gen')])
.then(n =>{
console.log(`[${il.ts()} | lpevent]`.inverse, `${n[0].t} исключил ${n[1].t} из беседы с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_kick_user', {kicker: n[0].f, kicked: n[1].f, peer_id: ans[3]-2000000000});
})
break;
default:
break;
}
}else{
switch(ans[0]){ // Parse LongPoll's updates array
case 7: // Read incoming messages
lpevent.emit('read', {peer_id: ans[1], local_id: ans[2]})
break;
case 8: // Friend goes online
lpevent.emit('online', {user_id: ans[1]})
break;
case 9: // Friend goes offline
lpevent.emit('offline', {user_id: ans[1], flags: ans[2]})
break;
case 61: // User started writing
lpevent.emit('userwrite', {user_id: ans[1]})
break;
case 62: // User started writing in chat
lpevent.emit('chatwrite', {user_id: ans[1], chat_id: ans[2]})
break;
case 4: // Incoming message
var isConv = false;
var convName = '', msg = '', userid = 0, forwarded = [];
if(ans[7] && ans[7].from){userid = ans[7]['from'];ans[3] -= 2000000000;isConv = true}
else{userid = ans[3]}
if(ans[7] && ans[7].fwd){forwarded = ans[7].fwd.split(',').map(e => e.split('_')[1])}
if(isConv){convName = ans[5]}
if(!sentids.includes(ans[1])){
pu(userid).then(result => {
resobj = {
type: isConv ? 'conv' : 'user',
id: ans[3],
msgid: ans[1],
fwd: forwarded,
msg: {
full: ans[6],
args: bot.getArgs(ans[6], isConv)
},
s: result,
answer: function(msg, options, stickerid){
bot.sendmsg(isConv ? 'conv' : 'user', ans[3], msg || null, options || null, stickerid || null)
}
};
if(isConv){resobj['name'] = convName}
console.log(`[${il.ts()} | msg <=] ${il.lpmask(resobj)}`.yellow);il.parse(resobj);
})
}else{
sentids.splice(sentids.indexOf(ans[1]), 1)
}
break;
default:
break;
}
}
})
},
longpollLoop(info){
r.post({
url: 'https://'+info.server,
headers: {'content-type' : 'application/x-www-form-urlencoded'},
form: {act: 'a_check', key: info.key, ts: info.ts, wait: 25, mode: 2, version: 1}
}, function(err, resp, body){
try{answer = JSON.parse(body);} catch(e){console.log(err)}
if(answer){
if(answer.updates && answer.updates.length == 0){il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})}
else if(answer.failed == 1){il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})}
else if(answer.failed == 2 || answer.failed == 3){
api.call('messages.getLongPollServer', {use_ssl: 1}).then(res => {
console.log(`${colors.green('['+il.ts()+'| init]')} Цикл LongPoll перезапущен с новым ключом`);il.longpollLoop(res);
});
}else{
il.parselp(answer.updates);
il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})
}
}else{
il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})
}
})
},
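// --- Editor's note: clarification only, based on the publicly documented VK Long Poll
// protocol; verify against the current API reference. ---
// failed === 1: the passed ts is outdated, so the loop above simply retries with the ts
// returned in the answer; failed === 2 or 3: the key expired or session data was lost, so a
// fresh key/server is requested via messages.getLongPollServer before looping again.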
parse(msgobj){
cmd = msgobj.msg.full.split(' ');
function a(){return global.botname.includes(cmd[0].replace(/[^a-zа-я]/gi, '').trim().toLowerCase())}
global.use(msgobj.msg.full, a(), function(allow){
if(allow === true){
if(global.botname !== false){
if(msgobj.type == 'conv') {
if(a() && dict.check(cmd.slice(1).join(' '))) {
bot.sendmsg('conv', msgobj.id, bot.getAnswer(msgobj)+dict.check(cmd.slice(1).join(' ')));
}else if(a()){
msgevent.emit(cmd[1].toLowerCase(), msgobj);
}
}else{
if(dict.check(msgobj.msg.full)){
bot.sendmsg('user', msgobj.id, dict.check(msgobj.msg.full))
}else{
msgevent.emit(cmd[0].toLowerCase(), msgobj);
}
}
}
amsgevent.emit(msgobj.msg.full.toLowerCase(), msgobj);
}else if(allow instanceof Error && allow.message){
console.log(`[${il.ts()} | filter ~>] ${allow.message}`.red)
if(msgobj.type == 'conv'){
bot.sendmsg('conv', msgobj.id, bot.getAnswer(msgobj)+allow.message);
}else{
| bot.sendmsg('user', msgobj.id, allow.message)
}
}
})
}
};
| conditional_block |
|
bot.js | , reject){ // Parses user id and saves it to cache, or returns value from cache
if(global.cache[userid]){
resolve({id: userid, fname: global.cache[userid][0], lname: global.cache[userid][1]});
}else{
api.call('users.get', {user_ids: userid}).then(res => {
global.cache[userid] = [res[0].first_name, res[0].last_name];
console.log(`[${il.ts()} | cache] Пользователь ${userid} кэширован`.green);
fs.writeFile('cache.json', JSON.stringify(global.cache, null, 2), null);
resolve({id: userid, fname: res[0].first_name, lname: res[0].last_name});
})
}
})
}
answer.forEach(function(ans){
if(ans[7] && ans[7]['source_act']){
function truncate(id, nc){
return new Promise(function(resolve, reject){
if(nc){
Promise.all([api.call('users.get', {user_ids: id, name_case: nc}), pu(id)])
.then(res => {resolve({t: `${res[0][0].first_name} ${res[0][0].last_name}`,f: res[1]})})
}else{pu(id).then(usr => {resolve({t: `${usr.fname.charAt(0)}. ${usr.lname}`, f: usr})})}
})
}
switch(ans[7]['source_act']){
case 'chat_create': // Someone created chat
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} создал беседу "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_create', {name: ans[7]['source_text'], admin: name.f, peer_id: ans[3]});
})
break;
case 'chat_title_update': // Someone updated chat name
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил название беседы с "${ans[7]['source_old_text']}" на "${ans[7]['source_text']}"`.green)
lpevent.emit('chat_title_update', {oldname: ans[7]['source_old_text'], newname: ans[7]['source_text'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_photo_update': // Someone updated chat photo
truncate(ans[7].from)
.then(name => {
console.log(`[${il.ts()} | lpevent]`.inverse, `${name.t} изменил фото в беседе с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_photo_update', {photo: ans[7]['attach1'], user: name.f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_invite_user': // Someone invited user into chat
Promise.all([truncate(ans[7].from), truncate(ans[7].source_mid, 'gen')])
.then(n =>{
console.log(`[${il.ts()} | lpevent]`.inverse, `${n[0].t} пригласил ${n[1].t} в беседу с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_invite_user', {inviter: n[0].f, invited: n[1].f, peer_id: ans[3]-2000000000});
})
break;
case 'chat_kick_user': // Someone kicked user from chat
Promise.all([truncate(ans[7].from), truncate(ans[7].source_mid, 'gen')])
.then(n =>{
console.log(`[${il.ts()} | lpevent]`.inverse, `${n[0].t} исключил ${n[1].t} из беседы с id ${ans[3]-2000000000}`.green)
lpevent.emit('chat_kick_user', {kicker: n[0].f, kicked: n[1].f, peer_id: ans[3]-2000000000});
})
break;
default:
break;
}
}else{
switch(ans[0]){ // Parse LongPoll's updates array
case 7: // Read incoming messages
lpevent.emit('read', {peer_id: ans[1], local_id: ans[2]})
break;
case 8: // Friend goes online
lpevent.emit('online', {user_id: ans[1]})
break;
case 9: // Friend goes offline
lpevent.emit('offline', {user_id: ans[1], flags: ans[2]})
break;
case 61: // User started writing
lpevent.emit('userwrite', {user_id: ans[1]})
break;
case 62: // User started writing in chat
lpevent.emit('chatwrite', {user_id: ans[1], chat_id: ans[2]})
break;
case 4: // Incoming message
var isConv = false;
var convName = '', msg = '', userid = 0, forwarded = [];
if(ans[7] && ans[7].from){userid = ans[7]['from'];ans[3] -= 2000000000;isConv = true}
else{userid = ans[3]}
if(ans[7] && ans[7].fwd){forwarded = ans[7].fwd.split(',').map(e => e.split('_')[1])}
if(isConv){convName = ans[5]}
if(!sentids.includes(ans[1])){
pu(userid).then(result => {
resobj = {
type: isConv ? 'conv' : 'user',
id: ans[3],
msgid: ans[1],
fwd: forwarded,
msg: {
full: ans[6],
args: bot.getArgs(ans[6], isConv)
},
s: result,
answer: function(msg, options, stickerid){
bot.sendmsg(isConv ? 'conv' : 'user', ans[3], msg || null, options || null, stickerid || null)
}
};
if(isConv){resobj['name'] = convName}
console.log(`[${il.ts()} | msg <=] ${il.lpmask(resobj)}`.yellow);il.parse(resobj);
})
}else{
sentids.splice(sentids.indexOf(ans[1]), 1)
}
break;
default:
break;
}
}
})
},
longpollLoop(info){
r.post({
url: 'https://'+info.server,
headers: {'content-type' : 'application/x-www-form-urlencoded'},
form: {act: 'a_check', key: info.key, ts: info.ts, wait: 25, mode: 2, version: 1}
}, function(err, resp, body){
try{answer = JSON.parse(body);} catch(e){console.log(err)}
if(answer){
if(answer.updates && answer.updates.length == 0){il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})}
else if(answer.failed == 1){il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})}
else if(answer.failed == 2 || answer.failed == 3){
api.call('messages.getLongPollServer', {use_ssl: 1}).then(res => {
console.log(`${colors.green('['+il.ts()+'| init]')} Цикл LongPoll перезапущен с новым ключом`);il.longpollLoop(res);
});
}else{
il.parselp(answer.updates);
il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})
}
}else{
il.longpollLoop({key: info.key, server: info.server, ts: answer.ts})
}
})
},
parse(msgobj){
cmd = msgobj.msg.full.split(' ');
function a(){return global.botname.includes(cmd[0].replace(/[^a-zа-я]/gi, '').trim().toLowerCase())}
global.use(msgobj.msg.full, a(), function(allow){
if(allow === true){
if(global.botname !== false){
if(msgobj.type == 'conv') {
if(a() && dict.check(cmd.slice(1).join(' '))) {
bot.sendmsg('conv', msgobj.id, bot.getAnswer(msgobj)+dict.check(cmd.slice(1).join(' ')));
}else if(a()){
msgevent.emit(cmd[1].toLowerCase(), msgobj);
}
}else{
if(dict.check(msgobj.msg.full)){
bot.sendmsg('user', msgobj.id, dict.check(msgobj.msg.full))
}else{ | msgevent.emit(cmd[0].toLowerCase(), msgobj);
}
} | random_line_split |
|
column_extractor.py | that can be tweaked
LINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output
PADDING = 10 # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. Create a debug image (tiff) showing the columns that were found
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = (a.size-2)+1
return a[1*np.arange(nrows)[:,None] + np.arange(2)]
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print("invert image")
# Should we edit these parameters?
#3/18/21 - experimented on threshold, 140 is good.
_,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
Dilation is the opposite of erosion: a pixel element becomes '1' if at least one pixel under the kernel is '1'.
It therefore grows the white (foreground) regions of the image.
In noise removal, erosion is normally followed by dilation: erosion removes white noise but also
shrinks the object, so we dilate afterwards to restore its size (the noise stays gone).
Dilation is also useful for joining broken parts of an object.
"""
print("applying dilation morph")
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.
'''
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
'''
return temp_img
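# --- Editor's note: clarification only, not part of the original pipeline. ---
# DILATE_KERNEL above is a single vertical line of ones, so each dilation pass smears white
# (text) pixels up and down but not sideways. After 15 iterations the lines of a text column
# merge into one tall blob while the gutters between columns stay dark, which is what makes
# the per-column sums in createColumnImages() cleanly separable.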
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis = COLUMNS)
sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column
sums = sums * -4 # invert so that minimums become maximums and exaggerate the data so the peaks are clearer
peaks, _ = find_peaks(sums, distance=600) # the column indexes of the img array, spaced at least 600 (the distance argument) from the previous peak
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
'''
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
'''
print("PeakNum, Sum, QRemove for " + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print("----------")
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg 0\n")
return files
peaks[0] = 0 # automatically make the left most column index the start of the image
peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print("found image that is causing problems: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg " + str(peaks.size) + "\n")
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0]-PADDING, 0)
xend = min(columnIndexPair[1]+PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print("writing out cropped image: ", filepath)
# Apply adaptative thresholding to the image with a threshold of 25/100
#crop_img = adaptative_thresholding(crop_img, 25)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
# For removing the old image?
# os.remove(os.path.join(directory, basename + ".jp2"))
return files
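# --- Editor's note: illustrative sketch, not part of the original pipeline. ---
# Minimal synthetic illustration of the peak-finding idea used above: the dilated, inverted
# page has large vertical sums inside text columns and small sums in the gutters; negating
# the profile turns the gutters into peaks for scipy.signal.find_peaks. The helper name is
# hypothetical and nothing in this file calls it.
def _demo_column_peaks():
    import numpy as np
    from scipy.signal import find_peaks
    sums = np.array([900] * 700 + [20] * 50 + [900] * 700 + [20] * 50 + [900] * 700)
    inverted = sums * -4                  # gutters (minima) become maxima
    peaks, _ = find_peaks(inverted, distance=600)
    return peaks                          # roughly the x positions of the two gutters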
def invert_experiment():
test_img = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= ' + str(thresh))
_,temp_img = cv2.threshold(test_img, thresh, 255, cv2.THRESH_BINARY_INV)
cv2.imwrite('./ocr/test_images/thresh='+str(thresh)+'.jpg', temp_img)
def test(img, basename):
#h, w, _ = img.shape
#test_img = cv2.imread('./ocr/data/8k71pf94q/2_commonwealth_8k71pf94q_accessFull.jpg')
test_img = convertToGrayscale(img)
#ret,test_img = cv2.threshold(test_img,25,255,0)
#cv2.imwrite('./ocr/test_images/contours/'+basename+'prepixelcrop.jpg', test_img)
#test_img = test_img[10:h-10, 10: w-10]
#y_nonzero, x_nonzero = np.nonzero(test_img)
#test_img = test_img[np.min(y_nonzero):np.max(y_nonzero), np.min(x_nonzero):np.max(x_nonzero)]
test_img = invert(test_img)
test_img = dilateDirection(test_img)
#contours,hierarchy = cv2.findContours(test_img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
#cnt = contours[0]
#x,y,w,h = cv2.boundingRect(cnt)
#test_img = cv2.rectangle(img,(10,10),(w-10, h-10), GREEN, LINE_THICKNESS)
#test_img = cv2.drawContours(test_img, contours, -1, GREEN, LINE_THICKNESS)
#crop = test_img[y:y+h,x:x+w]
cv2.imwrite('./ocr/test_images/contours/'+basename+'dilated.jpg', test_img)
'''
for r in range(0, 40, 5):
name = 'rank=' + str(r) + ".jpg"
path = './ocr/test_images/' + name
new_img = ndimage.rank_filter(test_img, rank=r, size=20)
print("writing " + name)
cv2.imwrite(path, new_img)
'''
#cv2.imwrite('./ocr/test_images/inverted.jpg', test_img)
if __name__ == "__main__":
print("STARTING")
for f in os.listdir('./ocr/data/gb19gw39h/'):
| if f.endswith(".jpg"):
#test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])
createColumnImages(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0], './ocr/columns/gb19gw39h/') | conditional_block |
|
column_extractor.py | import sys
os.environ['OPENCV_IO_ENABLE_JASPER']='True' # has to be set before importing cv2 otherwise it won't read the variable
import numpy as np
import cv2
import subprocess
from multiprocessing import Pool
from scipy.signal import find_peaks, find_peaks_cwt
import scipy.ndimage as ndimage
from IPython.display import Image as KImage
#custom kernel that is used to blend together text in the Y axis
DILATE_KERNEL = np.array([
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype=np.uint8)
# Run adaptive thresholding (noticeably slower than leaving it out of the pipeline)
def adaptative_thresholding(img, threshold):
# Load image
I = img
# Convert image to grayscale
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
# Original image size
orignrows, origncols = gray.shape
# Windows size
M = int(np.floor(orignrows/16) + 1)
N = int(np.floor(origncols/16) + 1)
# Image border padding related to windows size
Mextend = round(M/2)-1
Nextend = round(N/2)-1
# Padding image
aux =cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,
right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M,N),np.int32)
# Image integral calculation
imageIntegral = cv2.integral(aux, windows,-1)
# Integral image size
nrows, ncols = imageIntegral.shape
# Memory allocation for cumulative region image
result = np.zeros((orignrows, origncols))
# Image cumulative pixels in windows size calculation
for i in range(nrows-M):
for j in range(ncols-N):
result[i, j] = imageIntegral[i+M, j+N] - imageIntegral[i, j+N]+ imageIntegral[i, j] - imageIntegral[i+M,j]
# Output binary image memory allocation
binar = np.ones((orignrows, origncols), dtype=bool)  # plain bool: np.bool is deprecated/removed in newer NumPy
# Gray image weighted by windows size
graymult = (gray).astype('float64')*M*N
# Output image binarization
binar[graymult <= result*(100.0 - threshold)/100.0] = False
# binary image to UINT8 conversion
binar = (255*binar).astype(np.uint8)
return binar
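# --- Editor's note: clarification only, not part of the original pipeline. ---
# The function above uses an integral image so that every M x N window sum costs four lookups:
#     window_sum = I[i+M, j+N] - I[i, j+N] - I[i+M, j] + I[i, j]
# A pixel is then set to black when gray * M * N <= window_sum * (100 - threshold) / 100,
# i.e. when it is at least `threshold` percent darker than its local window mean.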
def Q_test(sorted_data):
conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}
q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] - sorted_data[0])
print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(sorted_data[-1] - sorted_data[0])))
print("q_exp : " + str(q_exp))
return q_exp > conf95_level[min(9, len(sorted_data))]
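# --- Editor's note: clarification only, not part of the original pipeline. ---
# Q_test() above is a Dixon's Q outlier test on the sorted column-sum peaks: it compares the
# gap between the two smallest values to the full range and flags the smallest value at ~95%
# confidence when that ratio exceeds the tabulated critical value. Hypothetical example:
#     Q_test([-9000, -3100, -3000, -2900, -2800])
#     q_exp = 5900 / 6200 ~= 0.95 > 0.71 (critical value for n = 5) -> outlier flagged.
# The call site in createColumnImages() currently keeps this check commented out.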
# static variables for clarity
COLUMNS = 0
GREEN = (0, 255, 0)
# parameters that can be tweaked
LINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output
PADDING = 10 # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. Create a debug image (tiff) showing the columns that were found
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = (a.size-2)+1
return a[1*np.arange(nrows)[:,None] + np.arange(2)]
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print("invert image")
# Should we edit these parameters?
#3/18/21 - experimented on threshold, 140 is good.
_,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
Dilation is the opposite of erosion: a pixel element becomes '1' if at least one pixel under the kernel is '1'.
It therefore grows the white (foreground) regions of the image.
In noise removal, erosion is normally followed by dilation: erosion removes white noise but also
shrinks the object, so we dilate afterwards to restore its size (the noise stays gone).
Dilation is also useful for joining broken parts of an object.
"""
print("applying dilation morph")
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.
'''
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
'''
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis = COLUMNS)
sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column
sums = sums * -4 # invert so that minimums become maximums and exaggerate the data so the peaks are clearer
peaks, _ = find_peaks(sums, distance=600) # the column indexes of the img array, spaced at least 600 (the distance argument) from the previous peak
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
'''
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
'''
print("PeakNum, Sum, QRemove for " + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print("----------")
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg 0\n")
return files
peaks[0] = 0 # automatically make the left most column index the start of the image
peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print("found image that is causing problems: ", os.path.join(directory, basename))
f | import io
from pathlib import Path | random_line_split |
|
column_extractor.py | )
# Original image size
orignrows, origncols = gray.shape
# Windows size
M = int(np.floor(orignrows/16) + 1)
N = int(np.floor(origncols/16) + 1)
# Image border padding related to windows size
Mextend = round(M/2)-1
Nextend = round(N/2)-1
# Padding image
aux =cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,
right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M,N),np.int32)
# Image integral calculation
imageIntegral = cv2.integral(aux, windows,-1)
# Integral image size
nrows, ncols = imageIntegral.shape
# Memory allocation for cumulative region image
result = np.zeros((orignrows, origncols))
# Image cumulative pixels in windows size calculation
for i in range(nrows-M):
for j in range(ncols-N):
result[i, j] = imageIntegral[i+M, j+N] - imageIntegral[i, j+N]+ imageIntegral[i, j] - imageIntegral[i+M,j]
# Output binary image memory allocation
binar = np.ones((orignrows, origncols), dtype=bool)  # plain bool: np.bool is deprecated/removed in newer NumPy
# Gray image weighted by windows size
graymult = (gray).astype('float64')*M*N
# Output image binarization
binar[graymult <= result*(100.0 - threshold)/100.0] = False
# binary image to UINT8 conversion
binar = (255*binar).astype(np.uint8)
return binar
def Q_test(sorted_data):
conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}
q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] - sorted_data[0])
print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(sorted_data[-1] - sorted_data[0])))
print("q_exp : " + str(q_exp))
return q_exp > conf95_level[min(9, len(sorted_data))]
# static variables for clarity
COLUMNS = 0
GREEN = (0, 255, 0)
# parameters that can be tweaked
LINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output
PADDING = 10 # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. Create a debug image (tiff) showing the columns that were found
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = (a.size-2)+1
return a[1*np.arange(nrows)[:,None] + np.arange(2)]
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print("invert image")
# Should we edit these parameters?
#3/18/21 - experimented on threshold, 140 is good.
_,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
|
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis = COLUMNS)
sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column
sums = sums * -4 # invert so that minimums become maximums and exaggerate the data so the peaks are clearer
peaks, _ = find_peaks(sums, distance=600) # the column indexes of the img array, spaced at least 600 (the distance argument) from the previous peak
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
'''
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
'''
print("PeakNum, Sum, QRemove for " + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print("----------")
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg 0\n")
return files
peaks[0] = 0 # automatically make the left most column index the start of the image
peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print("found image that is causing problems: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg " + str(peaks.size) + "\n")
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0]-PADDING, 0)
xend = min(columnIndexPair[1]+PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print("writing out cropped image: ", filepath)
# Apply adaptative thresholding to the image with a threshold of 25/100
#crop_img = adaptative_thresholding(crop_img, 25)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
# For removing the old image?
# os.remove(os.path.join(directory, basename + ".jp2"))
return files
def invert_experiment():
test_img = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= ' + str(thresh))
_,temp_img = cv2.threshold(test_img, thresh, 255, cv2.THRESH_BINARY_INV)
cv2.imwrite('./ocr/test_images | """
It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
So it increases the white region in the image or size of foreground object increases.
Normally, in cases like noise removal, erosion is followed by dilation.
Because, erosion removes white noises, but it also shrinks our object.
So we dilate it. Since noise is gone, they won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print("applying dilation morph")
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.
'''
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
'''
return temp_img | identifier_body |
column_extractor.py | (img, threshold):
# Load image
I = img
# Convert image to grayscale
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
# Original image size
orignrows, origncols = gray.shape
# Windows size
M = int(np.floor(orignrows/16) + 1)
N = int(np.floor(origncols/16) + 1)
# Image border padding related to windows size
Mextend = round(M/2)-1
Nextend = round(N/2)-1
# Padding image
aux =cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,
right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M,N),np.int32)
# Image integral calculation
imageIntegral = cv2.integral(aux, windows,-1)
# Integral image size
nrows, ncols = imageIntegral.shape
# Memory allocation for cumulative region image
result = np.zeros((orignrows, origncols))
# Image cumulative pixels in windows size calculation
for i in range(nrows-M):
for j in range(ncols-N):
result[i, j] = imageIntegral[i+M, j+N] - imageIntegral[i, j+N]+ imageIntegral[i, j] - imageIntegral[i+M,j]
# Output binary image memory allocation
    binar = np.ones((orignrows, origncols), dtype=bool)
# Gray image weighted by windows size
graymult = (gray).astype('float64')*M*N
# Output image binarization
binar[graymult <= result*(100.0 - threshold)/100.0] = False
# binary image to UINT8 conversion
binar = (255*binar).astype(np.uint8)
return binar
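# Illustrative usage sketch for the integral-image adaptive thresholding above; the file path
# below is hypothetical and this helper is not called anywhere in the pipeline.
def _adaptative_thresholding_example(path='example_page.jpg'):
    img = cv2.imread(path)
    if img is None:
        raise FileNotFoundError(path)
    return adaptative_thresholding(img, 25)  # binarize with a 25/100 threshold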
def Q_test(sorted_data):
conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}
q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] - sorted_data[0])
print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(sorted_data[-1] - sorted_data[0])))
print("q_exp : " + str(q_exp))
return q_exp > conf95_level[min(9, len(sorted_data))]
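# Illustrative sketch of how Q_test (Dixon's Q test) is meant to flag an outlier among the
# sorted column sums at ~95% confidence; the values below are made up.
def _q_test_example():
    sorted_sums = [-5000, -1200, -1150, -1100, -1050]  # ascending, as produced by sorted()
    return Q_test(sorted_sums)  # True here: -5000 lies far from the other sums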
# static variables for clarity
COLUMNS = 0
GREEN = (0, 255, 0)
# parameters that can be tweaked
LINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output
PADDING = 10 # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. Create a debug image (tiff) showing the columns that were found
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = (a.size-2)+1
return a[1*np.arange(nrows)[:,None] + np.arange(2)]
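# Note on the indexing trick above: np.arange(nrows)[:, None] + np.arange(2) builds the index
# rows [[0, 1], [1, 2], ...], so a[...] returns overlapping (left, right) pairs of the peak
# positions, e.g. [13, 1257, 2474] -> [[13, 1257], [1257, 2474]].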
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print("invert image")
# Should we edit these parameters?
#3/18/21 - experimented on threshold, 140 is good.
_,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
    It is just the opposite of erosion. Here, a pixel element is '1' if at least one pixel under the kernel is '1'.
    So it increases the white region in the image, i.e. the size of the foreground object increases.
    Normally, in cases like noise removal, erosion is followed by dilation.
    Because erosion removes white noise, but it also shrinks our object.
    So we dilate it. Since the noise is gone, it won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print("applying dilation morph")
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.
'''
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
'''
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis = COLUMNS)
    sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the leftmost column
    sums = sums * -4 # negate so that minima become maxima and exaggerate the data so the peaks are easier to see
    peaks, _ = find_peaks(sums, distance=600) # the column indexes of the img array, spaced at least 600 apart from the previous peak
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
'''
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
'''
print("PeakNum, Sum, QRemove for " + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print("----------")
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg 0\n")
return files
    peaks[0] = 0 # automatically make the leftmost column index the start of the image
    peaks[-1] = sums.size - 1 # automatically make the rightmost column index the end of the image
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print("found image that is causing problems: ", os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + ".jpg " + str(peaks.size) + "\n")
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0]-PADDING, 0)
xend = min(columnIndexPair[1]+PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print("writing out cropped image: ", filepath)
        # Apply adaptive thresholding to the image with a threshold of 25/100
#crop_img = adaptative_thresholding(crop_img, 25)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
# For removing the old image?
# os.remove(os.path.join(directory, basename + ".jp2"))
return files
def invert_experiment():
test_img = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= | adaptative_thresholding | identifier_name |
|
on_initialize.rs | (unused)]
use crate::process::ShellCommand;
use crate::stdio_server::job;
use crate::stdio_server::provider::{Context, ProviderSource};
use crate::tools::ctags::ProjectCtagsCommand;
use crate::tools::rg::{RgTokioCommand, RG_EXEC_CMD};
use anyhow::Result;
use filter::SourceItem;
use printer::{DisplayLines, Printer};
use serde_json::{json, Value};
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
use types::ClapItem;
use utils::count_lines;
async fn execute_and_write_cache(
cmd: &str,
cache_file: std::path::PathBuf,
) -> std::io::Result<ProviderSource> {
    // Cannot use subprocess::Exec::shell here.
//
// Must use TokioCommand otherwise the timeout may not work.
let mut tokio_cmd = crate::process::tokio::shell_command(cmd);
crate::process::tokio::write_stdout_to_file(&mut tokio_cmd, &cache_file).await?;
let total = count_lines(std::fs::File::open(&cache_file)?)?;
Ok(ProviderSource::CachedFile {
total,
path: cache_file,
refreshed: true,
})
}
fn to_small_provider_source(lines: Vec<String>) -> ProviderSource {
let total = lines.len();
let items = lines
.into_iter()
.map(|line| Arc::new(SourceItem::from(line)) as Arc<dyn ClapItem>)
.collect::<Vec<_>>();
ProviderSource::Small { total, items }
}
/// Performs the initialization like collecting the source and total number of source items.
async fn initialize_provider_source(ctx: &Context) -> Result<ProviderSource> {
// Known providers.
match ctx.provider_id() {
"blines" => {
let total = count_lines(std::fs::File::open(&ctx.env.start_buffer_path)?)?;
let path = ctx.env.start_buffer_path.clone();
return Ok(ProviderSource::File { total, path });
}
"tags" => {
let items = crate::tools::ctags::buffer_tag_items(&ctx.env.start_buffer_path, false)?;
let total = items.len();
return Ok(ProviderSource::Small { total, items });
}
"proj_tags" => {
let ctags_cmd = ProjectCtagsCommand::with_cwd(ctx.cwd.to_path_buf());
let provider_source = if ctx.env.no_cache {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
} else {
match ctags_cmd.ctags_cache() {
Some((total, path)) => ProviderSource::CachedFile {
total,
path,
refreshed: false,
},
None => {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
}
}
};
return Ok(provider_source);
}
"help_tags" => {
let helplang: String = ctx.vim.eval("&helplang").await?;
let runtimepath: String = ctx.vim.eval("&runtimepath").await?;
let doc_tags = std::iter::once("/doc/tags".to_string()).chain(
helplang
.split(',')
.filter(|&lang| lang != "en")
.map(|lang| format!("/doc/tags-{lang}")),
);
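            // Illustrative note (not from the original source): with `helplang = "en,de"` the
            // chain above yields "/doc/tags" followed by "/doc/tags-de"; "en" is filtered out
            // because the base "/doc/tags" file already holds the English help tags.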
let lines = crate::helptags::generate_tag_lines(doc_tags, &runtimepath);
return Ok(to_small_provider_source(lines));
}
_ => {}
}
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
if let Some(value) = source_cmd.into_iter().next() {
match value {
// Source is a String: g:__t_string, g:__t_func_string
Value::String(command) => {
let shell_cmd = ShellCommand::new(command, ctx.cwd.to_path_buf());
let cache_file = shell_cmd.cache_file_path()?;
const DIRECT_CREATE_NEW_SOURCE: &[&str] = &["files"];
let create_new_source_directly =
DIRECT_CREATE_NEW_SOURCE.contains(&ctx.provider_id());
let provider_source = if create_new_source_directly || ctx.env.no_cache {
execute_and_write_cache(&shell_cmd.command, cache_file).await?
} else {
match shell_cmd.cache_digest() {
Some(digest) => ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: false,
},
None => execute_and_write_cache(&shell_cmd.command, cache_file).await?,
}
};
if let ProviderSource::CachedFile { path, .. } = &provider_source {
ctx.vim.set_var("g:__clap_forerunner_tempfile", path)?;
}
return Ok(provider_source);
}
// Source is a List: g:__t_list, g:__t_func_list
Value::Array(arr) => {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
return Ok(to_small_provider_source(lines));
}
_ => {}
}
}
Ok(ProviderSource::Uninitialized)
}
fn on_initialized_source(
provider_source: ProviderSource,
ctx: &Context,
init_display: bool,
) -> Result<()> {
if let Some(total) = provider_source.total() {
ctx.vim.set_var("g:clap.display.initial_size", total)?;
}
if init_display {
if let Some(items) = provider_source.try_skim(ctx.provider_id(), 100) {
let printer = Printer::new(ctx.env.display_winwidth, ctx.env.icon);
let DisplayLines {
lines,
icon_added,
truncated_map,
..
} = printer.to_display_lines(items);
let using_cache = provider_source.using_cache();
ctx.vim.exec(
"clap#state#init_display",
json!([lines, truncated_map, icon_added, using_cache]),
)?;
}
if ctx.initializing_prompt_echoed.load(Ordering::SeqCst) {
ctx.vim.bare_exec("clap#helper#echo_clear")?;
}
}
ctx.set_provider_source(provider_source);
Ok(())
}
async fn initialize_list_source(ctx: Context, init_display: bool) -> Result<()> {
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
// Source must be initialized when it is a List: g:__t_list, g:__t_func_list
if let Some(Value::Array(arr)) = source_cmd.into_iter().next() {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
on_initialized_source(to_small_provider_source(lines), &ctx, init_display)?;
}
Ok(())
}
pub async fn initialize_provider(ctx: &Context, init_display: bool) -> Result<()> | Ok(Err(e)) => tracing::error!(?e, "Error occurred while initializing the provider source"),
Err(_) => {
// The initialization was not super fast.
tracing::debug!(timeout = ?TIMEOUT, "Did not receive value in time");
let source_cmd: Vec<String> = ctx.vim.bare_call("provider_source_cmd").await?;
let maybe_source_cmd = source_cmd.into_iter().next();
if let Some(source_cmd) = maybe_source_cmd {
ctx.set_provider_source(ProviderSource::Command(source_cmd));
}
/* no longer necessary for grep provider.
// Try creating cache for some potential heavy providers.
match context.provider_id() {
"grep" | "live_grep" => {
context.set_provider_source(ProviderSource::Command(RG_EXEC_CMD.to_string()));
let context = context.clone();
let rg_cmd = RgTokioCommand::new(context.cwd.to_path_buf());
let job_id = utils::calculate_hash(&rg_cmd);
job::try_start(
async move {
if let Ok(digest) = rg_cmd.create_cache().await {
let new = ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: true,
};
if !context.terminated.load(Ordering::SeqCst) {
context.set_provider_source(new);
}
}
},
job_id,
);
}
_ => {}
}
*/
}
| {
// Skip the initialization.
match ctx.provider_id() {
"grep" | "live_grep" => return Ok(()),
_ => {}
}
if ctx.env.source_is_list {
let ctx = ctx.clone();
ctx.set_provider_source(ProviderSource::Initializing);
// Initialize the list-style providers in another task so that the further messages won't
// be blocked by the initialization in case it takes too long.
tokio::spawn(initialize_list_source(ctx, init_display));
return Ok(());
}
const TIMEOUT: Duration = Duration::from_millis(300);
match tokio::time::timeout(TIMEOUT, initialize_provider_source(ctx)).await {
Ok(Ok(provider_source)) => on_initialized_source(provider_source, ctx, init_display)?, | identifier_body |
on_initialize.rs | (unused)]
use crate::process::ShellCommand;
use crate::stdio_server::job;
use crate::stdio_server::provider::{Context, ProviderSource};
use crate::tools::ctags::ProjectCtagsCommand;
use crate::tools::rg::{RgTokioCommand, RG_EXEC_CMD};
use anyhow::Result;
use filter::SourceItem;
use printer::{DisplayLines, Printer};
use serde_json::{json, Value};
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
use types::ClapItem;
use utils::count_lines;
async fn execute_and_write_cache(
cmd: &str,
cache_file: std::path::PathBuf,
) -> std::io::Result<ProviderSource> {
    // Cannot use subprocess::Exec::shell here.
//
// Must use TokioCommand otherwise the timeout may not work.
let mut tokio_cmd = crate::process::tokio::shell_command(cmd);
crate::process::tokio::write_stdout_to_file(&mut tokio_cmd, &cache_file).await?;
let total = count_lines(std::fs::File::open(&cache_file)?)?;
Ok(ProviderSource::CachedFile {
total,
path: cache_file,
refreshed: true,
})
}
fn to_small_provider_source(lines: Vec<String>) -> ProviderSource {
let total = lines.len();
let items = lines
.into_iter()
.map(|line| Arc::new(SourceItem::from(line)) as Arc<dyn ClapItem>)
.collect::<Vec<_>>();
ProviderSource::Small { total, items }
}
/// Performs the initialization like collecting the source and total number of source items.
async fn initialize_provider_source(ctx: &Context) -> Result<ProviderSource> {
// Known providers.
match ctx.provider_id() {
"blines" => {
let total = count_lines(std::fs::File::open(&ctx.env.start_buffer_path)?)?;
let path = ctx.env.start_buffer_path.clone();
return Ok(ProviderSource::File { total, path });
}
"tags" => {
let items = crate::tools::ctags::buffer_tag_items(&ctx.env.start_buffer_path, false)?;
let total = items.len();
return Ok(ProviderSource::Small { total, items });
}
"proj_tags" => {
let ctags_cmd = ProjectCtagsCommand::with_cwd(ctx.cwd.to_path_buf());
let provider_source = if ctx.env.no_cache {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
} else {
match ctags_cmd.ctags_cache() {
Some((total, path)) => ProviderSource::CachedFile {
total,
path,
refreshed: false,
},
None => {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
}
}
};
return Ok(provider_source);
}
"help_tags" => {
let helplang: String = ctx.vim.eval("&helplang").await?;
let runtimepath: String = ctx.vim.eval("&runtimepath").await?;
let doc_tags = std::iter::once("/doc/tags".to_string()).chain(
helplang
.split(',')
.filter(|&lang| lang != "en")
.map(|lang| format!("/doc/tags-{lang}")),
);
let lines = crate::helptags::generate_tag_lines(doc_tags, &runtimepath);
return Ok(to_small_provider_source(lines));
}
_ => {}
}
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
if let Some(value) = source_cmd.into_iter().next() {
match value {
// Source is a String: g:__t_string, g:__t_func_string
Value::String(command) => {
let shell_cmd = ShellCommand::new(command, ctx.cwd.to_path_buf());
let cache_file = shell_cmd.cache_file_path()?;
const DIRECT_CREATE_NEW_SOURCE: &[&str] = &["files"];
let create_new_source_directly =
DIRECT_CREATE_NEW_SOURCE.contains(&ctx.provider_id());
let provider_source = if create_new_source_directly || ctx.env.no_cache {
execute_and_write_cache(&shell_cmd.command, cache_file).await?
} else {
match shell_cmd.cache_digest() {
Some(digest) => ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: false,
},
None => execute_and_write_cache(&shell_cmd.command, cache_file).await?,
}
};
if let ProviderSource::CachedFile { path, .. } = &provider_source {
ctx.vim.set_var("g:__clap_forerunner_tempfile", path)?;
}
return Ok(provider_source);
}
// Source is a List: g:__t_list, g:__t_func_list
Value::Array(arr) => {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
return Ok(to_small_provider_source(lines));
}
_ => {}
}
}
Ok(ProviderSource::Uninitialized)
}
fn on_initialized_source(
provider_source: ProviderSource,
ctx: &Context,
init_display: bool,
) -> Result<()> {
if let Some(total) = provider_source.total() {
ctx.vim.set_var("g:clap.display.initial_size", total)?;
}
if init_display {
if let Some(items) = provider_source.try_skim(ctx.provider_id(), 100) {
let printer = Printer::new(ctx.env.display_winwidth, ctx.env.icon);
let DisplayLines {
lines,
icon_added,
truncated_map,
..
} = printer.to_display_lines(items);
let using_cache = provider_source.using_cache();
ctx.vim.exec(
"clap#state#init_display",
json!([lines, truncated_map, icon_added, using_cache]),
)?;
}
if ctx.initializing_prompt_echoed.load(Ordering::SeqCst) {
ctx.vim.bare_exec("clap#helper#echo_clear")?;
}
}
ctx.set_provider_source(provider_source);
Ok(())
}
async fn initialize_list_source(ctx: Context, init_display: bool) -> Result<()> {
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
// Source must be initialized when it is a List: g:__t_list, g:__t_func_list
if let Some(Value::Array(arr)) = source_cmd.into_iter().next() {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
on_initialized_source(to_small_provider_source(lines), &ctx, init_display)?;
}
Ok(())
}
pub async fn | (ctx: &Context, init_display: bool) -> Result<()> {
// Skip the initialization.
match ctx.provider_id() {
"grep" | "live_grep" => return Ok(()),
_ => {}
}
if ctx.env.source_is_list {
let ctx = ctx.clone();
ctx.set_provider_source(ProviderSource::Initializing);
// Initialize the list-style providers in another task so that the further messages won't
// be blocked by the initialization in case it takes too long.
tokio::spawn(initialize_list_source(ctx, init_display));
return Ok(());
}
const TIMEOUT: Duration = Duration::from_millis(300);
match tokio::time::timeout(TIMEOUT, initialize_provider_source(ctx)).await {
Ok(Ok(provider_source)) => on_initialized_source(provider_source, ctx, init_display)?,
Ok(Err(e)) => tracing::error!(?e, "Error occurred while initializing the provider source"),
Err(_) => {
// The initialization was not super fast.
tracing::debug!(timeout = ?TIMEOUT, "Did not receive value in time");
let source_cmd: Vec<String> = ctx.vim.bare_call("provider_source_cmd").await?;
let maybe_source_cmd = source_cmd.into_iter().next();
if let Some(source_cmd) = maybe_source_cmd {
ctx.set_provider_source(ProviderSource::Command(source_cmd));
}
/* no longer necessary for grep provider.
// Try creating cache for some potential heavy providers.
match context.provider_id() {
"grep" | "live_grep" => {
context.set_provider_source(ProviderSource::Command(RG_EXEC_CMD.to_string()));
let context = context.clone();
let rg_cmd = RgTokioCommand::new(context.cwd.to_path_buf());
let job_id = utils::calculate_hash(&rg_cmd);
job::try_start(
async move {
if let Ok(digest) = rg_cmd.create_cache().await {
let new = ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: true,
};
if !context.terminated.load(Ordering::SeqCst) {
context.set_provider_source(new);
}
}
},
job_id,
);
}
_ => {}
}
*/
}
| initialize_provider | identifier_name |
on_initialize.rs | allow(unused)]
use crate::process::ShellCommand;
use crate::stdio_server::job;
use crate::stdio_server::provider::{Context, ProviderSource};
use crate::tools::ctags::ProjectCtagsCommand;
use crate::tools::rg::{RgTokioCommand, RG_EXEC_CMD};
use anyhow::Result;
use filter::SourceItem;
use printer::{DisplayLines, Printer};
use serde_json::{json, Value};
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
use types::ClapItem;
use utils::count_lines;
async fn execute_and_write_cache(
cmd: &str,
cache_file: std::path::PathBuf,
) -> std::io::Result<ProviderSource> {
    // Cannot use subprocess::Exec::shell here.
//
// Must use TokioCommand otherwise the timeout may not work.
let mut tokio_cmd = crate::process::tokio::shell_command(cmd);
crate::process::tokio::write_stdout_to_file(&mut tokio_cmd, &cache_file).await?;
let total = count_lines(std::fs::File::open(&cache_file)?)?;
Ok(ProviderSource::CachedFile {
total,
path: cache_file,
refreshed: true,
})
}
fn to_small_provider_source(lines: Vec<String>) -> ProviderSource {
let total = lines.len();
let items = lines
.into_iter()
.map(|line| Arc::new(SourceItem::from(line)) as Arc<dyn ClapItem>)
.collect::<Vec<_>>();
ProviderSource::Small { total, items }
}
/// Performs the initialization like collecting the source and total number of source items.
async fn initialize_provider_source(ctx: &Context) -> Result<ProviderSource> {
// Known providers.
match ctx.provider_id() {
"blines" => {
let total = count_lines(std::fs::File::open(&ctx.env.start_buffer_path)?)?;
let path = ctx.env.start_buffer_path.clone();
return Ok(ProviderSource::File { total, path });
}
"tags" => {
let items = crate::tools::ctags::buffer_tag_items(&ctx.env.start_buffer_path, false)?;
let total = items.len();
return Ok(ProviderSource::Small { total, items });
}
"proj_tags" => {
let ctags_cmd = ProjectCtagsCommand::with_cwd(ctx.cwd.to_path_buf());
let provider_source = if ctx.env.no_cache {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
} else {
match ctags_cmd.ctags_cache() {
Some((total, path)) => ProviderSource::CachedFile {
total,
path,
refreshed: false,
},
None => {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines) | };
return Ok(provider_source);
}
"help_tags" => {
let helplang: String = ctx.vim.eval("&helplang").await?;
let runtimepath: String = ctx.vim.eval("&runtimepath").await?;
let doc_tags = std::iter::once("/doc/tags".to_string()).chain(
helplang
.split(',')
.filter(|&lang| lang != "en")
.map(|lang| format!("/doc/tags-{lang}")),
);
let lines = crate::helptags::generate_tag_lines(doc_tags, &runtimepath);
return Ok(to_small_provider_source(lines));
}
_ => {}
}
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
if let Some(value) = source_cmd.into_iter().next() {
match value {
// Source is a String: g:__t_string, g:__t_func_string
Value::String(command) => {
let shell_cmd = ShellCommand::new(command, ctx.cwd.to_path_buf());
let cache_file = shell_cmd.cache_file_path()?;
const DIRECT_CREATE_NEW_SOURCE: &[&str] = &["files"];
let create_new_source_directly =
DIRECT_CREATE_NEW_SOURCE.contains(&ctx.provider_id());
let provider_source = if create_new_source_directly || ctx.env.no_cache {
execute_and_write_cache(&shell_cmd.command, cache_file).await?
} else {
match shell_cmd.cache_digest() {
Some(digest) => ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: false,
},
None => execute_and_write_cache(&shell_cmd.command, cache_file).await?,
}
};
if let ProviderSource::CachedFile { path, .. } = &provider_source {
ctx.vim.set_var("g:__clap_forerunner_tempfile", path)?;
}
return Ok(provider_source);
}
// Source is a List: g:__t_list, g:__t_func_list
Value::Array(arr) => {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
return Ok(to_small_provider_source(lines));
}
_ => {}
}
}
Ok(ProviderSource::Uninitialized)
}
fn on_initialized_source(
provider_source: ProviderSource,
ctx: &Context,
init_display: bool,
) -> Result<()> {
if let Some(total) = provider_source.total() {
ctx.vim.set_var("g:clap.display.initial_size", total)?;
}
if init_display {
if let Some(items) = provider_source.try_skim(ctx.provider_id(), 100) {
let printer = Printer::new(ctx.env.display_winwidth, ctx.env.icon);
let DisplayLines {
lines,
icon_added,
truncated_map,
..
} = printer.to_display_lines(items);
let using_cache = provider_source.using_cache();
ctx.vim.exec(
"clap#state#init_display",
json!([lines, truncated_map, icon_added, using_cache]),
)?;
}
if ctx.initializing_prompt_echoed.load(Ordering::SeqCst) {
ctx.vim.bare_exec("clap#helper#echo_clear")?;
}
}
ctx.set_provider_source(provider_source);
Ok(())
}
async fn initialize_list_source(ctx: Context, init_display: bool) -> Result<()> {
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
// Source must be initialized when it is a List: g:__t_list, g:__t_func_list
if let Some(Value::Array(arr)) = source_cmd.into_iter().next() {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
on_initialized_source(to_small_provider_source(lines), &ctx, init_display)?;
}
Ok(())
}
pub async fn initialize_provider(ctx: &Context, init_display: bool) -> Result<()> {
// Skip the initialization.
match ctx.provider_id() {
"grep" | "live_grep" => return Ok(()),
_ => {}
}
if ctx.env.source_is_list {
let ctx = ctx.clone();
ctx.set_provider_source(ProviderSource::Initializing);
// Initialize the list-style providers in another task so that the further messages won't
// be blocked by the initialization in case it takes too long.
tokio::spawn(initialize_list_source(ctx, init_display));
return Ok(());
}
const TIMEOUT: Duration = Duration::from_millis(300);
match tokio::time::timeout(TIMEOUT, initialize_provider_source(ctx)).await {
Ok(Ok(provider_source)) => on_initialized_source(provider_source, ctx, init_display)?,
Ok(Err(e)) => tracing::error!(?e, "Error occurred while initializing the provider source"),
Err(_) => {
// The initialization was not super fast.
tracing::debug!(timeout = ?TIMEOUT, "Did not receive value in time");
let source_cmd: Vec<String> = ctx.vim.bare_call("provider_source_cmd").await?;
let maybe_source_cmd = source_cmd.into_iter().next();
if let Some(source_cmd) = maybe_source_cmd {
ctx.set_provider_source(ProviderSource::Command(source_cmd));
}
/* no longer necessary for grep provider.
// Try creating cache for some potential heavy providers.
match context.provider_id() {
"grep" | "live_grep" => {
context.set_provider_source(ProviderSource::Command(RG_EXEC_CMD.to_string()));
let context = context.clone();
let rg_cmd = RgTokioCommand::new(context.cwd.to_path_buf());
let job_id = utils::calculate_hash(&rg_cmd);
job::try_start(
async move {
if let Ok(digest) = rg_cmd.create_cache().await {
let new = ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: true,
};
if !context.terminated.load(Ordering::SeqCst) {
context.set_provider_source(new);
}
}
},
job_id,
);
}
_ => {}
}
*/
}
| }
} | random_line_split |
on_initialize.rs | (unused)]
use crate::process::ShellCommand;
use crate::stdio_server::job;
use crate::stdio_server::provider::{Context, ProviderSource};
use crate::tools::ctags::ProjectCtagsCommand;
use crate::tools::rg::{RgTokioCommand, RG_EXEC_CMD};
use anyhow::Result;
use filter::SourceItem;
use printer::{DisplayLines, Printer};
use serde_json::{json, Value};
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
use types::ClapItem;
use utils::count_lines;
async fn execute_and_write_cache(
cmd: &str,
cache_file: std::path::PathBuf,
) -> std::io::Result<ProviderSource> {
    // Cannot use subprocess::Exec::shell here.
//
// Must use TokioCommand otherwise the timeout may not work.
let mut tokio_cmd = crate::process::tokio::shell_command(cmd);
crate::process::tokio::write_stdout_to_file(&mut tokio_cmd, &cache_file).await?;
let total = count_lines(std::fs::File::open(&cache_file)?)?;
Ok(ProviderSource::CachedFile {
total,
path: cache_file,
refreshed: true,
})
}
fn to_small_provider_source(lines: Vec<String>) -> ProviderSource {
let total = lines.len();
let items = lines
.into_iter()
.map(|line| Arc::new(SourceItem::from(line)) as Arc<dyn ClapItem>)
.collect::<Vec<_>>();
ProviderSource::Small { total, items }
}
/// Performs the initialization like collecting the source and total number of source items.
async fn initialize_provider_source(ctx: &Context) -> Result<ProviderSource> {
// Known providers.
match ctx.provider_id() {
"blines" => {
let total = count_lines(std::fs::File::open(&ctx.env.start_buffer_path)?)?;
let path = ctx.env.start_buffer_path.clone();
return Ok(ProviderSource::File { total, path });
}
"tags" => {
let items = crate::tools::ctags::buffer_tag_items(&ctx.env.start_buffer_path, false)?;
let total = items.len();
return Ok(ProviderSource::Small { total, items });
}
"proj_tags" => {
let ctags_cmd = ProjectCtagsCommand::with_cwd(ctx.cwd.to_path_buf());
let provider_source = if ctx.env.no_cache {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
} else {
match ctags_cmd.ctags_cache() {
Some((total, path)) => ProviderSource::CachedFile {
total,
path,
refreshed: false,
},
None => {
let lines = ctags_cmd.execute_and_write_cache().await?;
to_small_provider_source(lines)
}
}
};
return Ok(provider_source);
}
"help_tags" => {
let helplang: String = ctx.vim.eval("&helplang").await?;
let runtimepath: String = ctx.vim.eval("&runtimepath").await?;
let doc_tags = std::iter::once("/doc/tags".to_string()).chain(
helplang
.split(',')
.filter(|&lang| lang != "en")
.map(|lang| format!("/doc/tags-{lang}")),
);
let lines = crate::helptags::generate_tag_lines(doc_tags, &runtimepath);
return Ok(to_small_provider_source(lines));
}
_ => {}
}
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
if let Some(value) = source_cmd.into_iter().next() {
match value {
// Source is a String: g:__t_string, g:__t_func_string
Value::String(command) => {
let shell_cmd = ShellCommand::new(command, ctx.cwd.to_path_buf());
let cache_file = shell_cmd.cache_file_path()?;
const DIRECT_CREATE_NEW_SOURCE: &[&str] = &["files"];
let create_new_source_directly =
DIRECT_CREATE_NEW_SOURCE.contains(&ctx.provider_id());
let provider_source = if create_new_source_directly || ctx.env.no_cache {
execute_and_write_cache(&shell_cmd.command, cache_file).await?
} else {
match shell_cmd.cache_digest() {
Some(digest) => ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: false,
},
None => execute_and_write_cache(&shell_cmd.command, cache_file).await?,
}
};
if let ProviderSource::CachedFile { path, .. } = &provider_source {
ctx.vim.set_var("g:__clap_forerunner_tempfile", path)?;
}
return Ok(provider_source);
}
// Source is a List: g:__t_list, g:__t_func_list
Value::Array(arr) => {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else {
None
}
})
.collect::<Vec<_>>();
return Ok(to_small_provider_source(lines));
}
_ => {}
}
}
Ok(ProviderSource::Uninitialized)
}
fn on_initialized_source(
provider_source: ProviderSource,
ctx: &Context,
init_display: bool,
) -> Result<()> {
if let Some(total) = provider_source.total() {
ctx.vim.set_var("g:clap.display.initial_size", total)?;
}
if init_display {
if let Some(items) = provider_source.try_skim(ctx.provider_id(), 100) {
let printer = Printer::new(ctx.env.display_winwidth, ctx.env.icon);
let DisplayLines {
lines,
icon_added,
truncated_map,
..
} = printer.to_display_lines(items);
let using_cache = provider_source.using_cache();
ctx.vim.exec(
"clap#state#init_display",
json!([lines, truncated_map, icon_added, using_cache]),
)?;
}
if ctx.initializing_prompt_echoed.load(Ordering::SeqCst) {
ctx.vim.bare_exec("clap#helper#echo_clear")?;
}
}
ctx.set_provider_source(provider_source);
Ok(())
}
async fn initialize_list_source(ctx: Context, init_display: bool) -> Result<()> {
let source_cmd: Vec<Value> = ctx.vim.bare_call("provider_source").await?;
// Source must be initialized when it is a List: g:__t_list, g:__t_func_list
if let Some(Value::Array(arr)) = source_cmd.into_iter().next() {
let lines = arr
.into_iter()
.filter_map(|v| {
if let Value::String(s) = v {
Some(s)
} else |
})
.collect::<Vec<_>>();
on_initialized_source(to_small_provider_source(lines), &ctx, init_display)?;
}
Ok(())
}
pub async fn initialize_provider(ctx: &Context, init_display: bool) -> Result<()> {
// Skip the initialization.
match ctx.provider_id() {
"grep" | "live_grep" => return Ok(()),
_ => {}
}
if ctx.env.source_is_list {
let ctx = ctx.clone();
ctx.set_provider_source(ProviderSource::Initializing);
// Initialize the list-style providers in another task so that the further messages won't
// be blocked by the initialization in case it takes too long.
tokio::spawn(initialize_list_source(ctx, init_display));
return Ok(());
}
const TIMEOUT: Duration = Duration::from_millis(300);
match tokio::time::timeout(TIMEOUT, initialize_provider_source(ctx)).await {
Ok(Ok(provider_source)) => on_initialized_source(provider_source, ctx, init_display)?,
Ok(Err(e)) => tracing::error!(?e, "Error occurred while initializing the provider source"),
Err(_) => {
// The initialization was not super fast.
tracing::debug!(timeout = ?TIMEOUT, "Did not receive value in time");
let source_cmd: Vec<String> = ctx.vim.bare_call("provider_source_cmd").await?;
let maybe_source_cmd = source_cmd.into_iter().next();
if let Some(source_cmd) = maybe_source_cmd {
ctx.set_provider_source(ProviderSource::Command(source_cmd));
}
/* no longer necessary for grep provider.
// Try creating cache for some potential heavy providers.
match context.provider_id() {
"grep" | "live_grep" => {
context.set_provider_source(ProviderSource::Command(RG_EXEC_CMD.to_string()));
let context = context.clone();
let rg_cmd = RgTokioCommand::new(context.cwd.to_path_buf());
let job_id = utils::calculate_hash(&rg_cmd);
job::try_start(
async move {
if let Ok(digest) = rg_cmd.create_cache().await {
let new = ProviderSource::CachedFile {
total: digest.total,
path: digest.cached_path,
refreshed: true,
};
if !context.terminated.load(Ordering::SeqCst) {
context.set_provider_source(new);
}
}
},
job_id,
);
}
_ => {}
}
*/
}
| {
None
} | conditional_block |
start-router.js | ) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
function ownKeys(object, enumerableOnly) |
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// @see https://stackoverflow.com/questions/21485545/is-there-a-way-to-tell-if-an-es6-promise-is-fulfilled-rejected-resolved#21489870
function makeQuerablePromise(promise) {
// Don't create a wrapper for promises that can already be queried.
if (promise.isResolved) return promise;
var isResolved = false;
var isRejected = false; // Observe the promise, saving the fulfillment in a closure scope.
var result = promise.then(function (v) {
isResolved = true;
return v;
}, function (e) {
isRejected = true;
throw e;
});
result.isFulfilled = function () {
return isResolved || isRejected;
};
result.isResolved = function () {
return isResolved;
};
result.isRejected = function () {
return isRejected;
};
return result;
}
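// Illustrative usage sketch for makeQuerablePromise (the promise below is hypothetical):
//   var queryable = makeQuerablePromise(somePromise);
//   queryable.isFulfilled(); // false until the promise settles
//   queryable.then(function () {
//     queryable.isResolved(); // true once it resolved
//     queryable.isRejected(); // false unless it rejected
//   });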
var startRouter = function startRouter(views, rootStore) {
var _ref = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var resources = _ref.resources,
_ref$runAllEvents = _ref.runAllEvents,
runAllEvents = _ref$runAllEvents === void 0 ? false : _ref$runAllEvents,
config = _objectWithoutProperties(_ref, ["resources", "runAllEvents"]);
var store = new _routerStore.RouterStore();
typeof rootStore === 'function' ? rootStore = rootStore(store) : rootStore.routerStore = store;
var browserHistory = (0, _history.createBrowserHistory)();
var history = (0, _sync.syncHistoryWithStore)(browserHistory, store);
var _buildRoutesAndViewSl = (0, _utils.buildRoutesAndViewSlots)(views),
routes = _buildRoutesAndViewSl.routes,
currentView = _buildRoutesAndViewSl.currentView;
store.configure(_objectSpread({}, config, {
routes: routes,
currentView: currentView
}));
var getPropValuesFromArray = function getPropValuesFromArray(objArr, prop) {
return objArr.reduce(function (arr, obj) {
arr.push(obj[prop]);
return arr;
}, []);
};
var buildAction = function buildAction(fn) {
var runAction;
if (typeof fn === 'string') {
var path = fn.split('.');
var obj = path[0];
var action = path[1];
if (resources.hasOwnProperty(obj) && typeof resources[obj][action] === 'function') {
runAction = resources[obj][action];
} else {
runAction = function runAction() {
console.error('Resource "', path.join('.'), '" does not exists!');
return Promise.resolve();
};
}
} else if (typeof fn === 'function') {
runAction = fn;
}
return runAction;
};
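  // Note (illustrative, resource names hypothetical): a string task such as 'users.fetch'
  // resolves to resources.users.fetch, a function task is used as-is, and an unknown resource
  // path falls back to a stub that logs an error and resolves immediately.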
var apply = function apply(task, params) {
var runAction = buildAction(task);
var result = typeof runAction === 'function' ? runAction(params, rootStore) : null;
return (0, _utils.isPromise)(result) ? result : Promise.resolve(result);
};
function processHistoryCallback(location, action) {
var matchedRoutes = (0, _utils.getObjectKeys)(store.routes).reduce(function (arr, routeName) {
var route = store.routes[routeName];
var keys = route.path.match(location.pathname);
if (keys) {
var params = _objectSpread({}, (0, _utils.buildParamsObject)(keys, route.path.tokens, route.defaultParams), {}, (0, _queryString.parse)(location.search));
arr.push({
route: route,
params: params
});
}
return arr;
}, []); // TODO: if more than one route is matched, what to do?
// if (matchedRoutes.length > 1) {
// }
var match = matchedRoutes.shift(); // TODO: when 404 happens, should we redirect or replace?
// default redirect
if (!match) {
console.error('404 Not Found!');
store.goTo('notFound');
return; // route = store.routes.notFound;
}
store.nextState = {
routeName: match.route.pathname,
params: (0, _mobx.toJS)(match.params)
}; // build new path for matched route
var newPath = [];
if (match.route.fallbackState === null) {
match.route.fallbackState = store.currentRoute ? {
routeName: store.currentRoute.pathname,
params: (0, _mobx.toJS)(store.params)
} : match.route.defaultState;
}
if (match.route.fallbackState) {
var _match$route$fallback = match.route.fallbackState,
routeName = _match$route$fallback.routeName,
params = _match$route$fallback.params;
var route = store.routes[routeName];
if (route) {
newPath = newPath.concat((0, _utils.buildLookupPath)(route));
match.params = _objectSpread({}, params, {}, match.params);
}
}
newPath = newPath.concat((0, _utils.buildLookupPath)(match.route));
newPath = _toConsumableArray(new Set(newPath)); // remove duplicates
var currentRoute = (0, _utils.buildLookupPath)(store.currentRoute); // add routes from previous path for onExit event to be triggered
var oldPath = currentRoute.reverse().filter(function (route) {
return route.isActive && !newPath.includes(route);
});
if (!runAllEvents) {
newPath = newPath.filter(function (route, i) {
return route.isActive && currentRoute.includes(route) && route["final"] && i === newPath.length - 1 || !route.isActive || i === newPath.length - 1 && route === store.currentRoute;
});
} // build params
var pathParams = newPath.reduce(function (obj, route) {
return _objectSpread({}, route.defaultParams, {}, obj);
}, match.params);
if (newPath.length > 0 && oldPath.length > 0 && newPath[newPath.length - 1].slot !== oldPath[0].slot && oldPath[0].fallbackState !== false) {
var _routeName = oldPath[0].fallbackState.routeName;
var _route = store.routes[_routeName];
var contextOldPath = (0, _utils.buildLookupPath)(_route).reverse().filter(function (route) {
return route.isActive;
});
oldPath = oldPath.concat(contextOldPath);
} // build fns
var fns = _utils.buildFnsArray.apply(void 0, _toConsumableArray(getPropValuesFromArray(oldPath, 'onExit')));
var _loop = function _loop(i) {
var route = newPath[i];
fns = fns.concat((0, _utils.buildFnsArray)(route.authCallback, route.beforeEnter, runAllEvents && route.isActive && newPath.length - 1 !== i || function (params, rootStore) {
return void store.onMatch(params, rootStore, route);
}));
};
for (var i = 0; i < | { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } | identifier_body |
start-router.js | (arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// @see https://stackoverflow.com/questions/21485545/is-there-a-way-to-tell-if-an-es6-promise-is-fulfilled-rejected-resolved#21489870
function makeQuerablePromise(promise) {
// Don't create a wrapper for promises that can already be queried.
if (promise.isResolved) return promise;
var isResolved = false;
var isRejected = false; // Observe the promise, saving the fulfillment in a closure scope.
var result = promise.then(function (v) {
isResolved = true;
return v;
}, function (e) {
isRejected = true;
throw e;
});
result.isFulfilled = function () {
return isResolved || isRejected;
};
result.isResolved = function () {
return isResolved;
};
result.isRejected = function () {
return isRejected;
};
return result;
}
var startRouter = function startRouter(views, rootStore) {
var _ref = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var resources = _ref.resources,
_ref$runAllEvents = _ref.runAllEvents,
runAllEvents = _ref$runAllEvents === void 0 ? false : _ref$runAllEvents,
config = _objectWithoutProperties(_ref, ["resources", "runAllEvents"]);
var store = new _routerStore.RouterStore();
typeof rootStore === 'function' ? rootStore = rootStore(store) : rootStore.routerStore = store;
var browserHistory = (0, _history.createBrowserHistory)();
var history = (0, _sync.syncHistoryWithStore)(browserHistory, store);
var _buildRoutesAndViewSl = (0, _utils.buildRoutesAndViewSlots)(views),
routes = _buildRoutesAndViewSl.routes,
currentView = _buildRoutesAndViewSl.currentView;
store.configure(_objectSpread({}, config, {
routes: routes,
currentView: currentView
}));
var getPropValuesFromArray = function getPropValuesFromArray(objArr, prop) {
return objArr.reduce(function (arr, obj) {
arr.push(obj[prop]);
return arr;
}, []);
};
var buildAction = function buildAction(fn) {
var runAction;
if (typeof fn === 'string') {
var path = fn.split('.');
var obj = path[0];
var action = path[1];
if (resources.hasOwnProperty(obj) && typeof resources[obj][action] === 'function') {
runAction = resources[obj][action];
} else {
runAction = function runAction() {
console.error('Resource "', path.join('.'), '" does not exists!');
return Promise.resolve();
};
}
} else if (typeof fn === 'function') {
runAction = fn;
}
return runAction;
};
var apply = function apply(task, params) {
var runAction = buildAction(task);
var result = typeof runAction === 'function' ? runAction(params, rootStore) : null;
return (0, _utils.isPromise)(result) ? result : Promise.resolve(result);
};
function processHistoryCallback(location, action) {
var matchedRoutes = (0, _utils.getObjectKeys)(store.routes).reduce(function (arr, routeName) {
var route = store.routes[routeName];
var keys = route.path.match(location.pathname);
if (keys) {
var params = _objectSpread({}, (0, _utils.buildParamsObject)(keys, route.path.tokens, route.defaultParams), {}, (0, _queryString.parse)(location.search));
arr.push({
route: route,
params: params
});
}
return arr;
}, []); // TODO: if more than one route is matched, what to do?
// if (matchedRoutes.length > 1) {
// }
var match = matchedRoutes.shift(); // TODO: when 404 happens, should we redirect or replace?
// default redirect
if (!match) {
console.error('404 Not Found!');
store.goTo('notFound');
return; // route = store.routes.notFound;
}
store.nextState = {
routeName: match.route.pathname,
params: (0, _mobx.toJS)(match.params)
}; // build new path for matched route
var newPath = [];
if (match.route.fallbackState === null) {
match.route.fallbackState = store.currentRoute ? {
routeName: store.currentRoute.pathname,
params: (0, _mobx.toJS)(store.params)
} : match.route.defaultState;
}
if (match.route.fallbackState) {
var _match$route$fallback = match.route.fallbackState,
routeName = _match$route$fallback.routeName,
params = _match$route$fallback.params;
var route = store.routes[routeName];
if (route) {
newPath = newPath.concat((0, _utils.buildLookupPath)(route));
match.params = _objectSpread({}, params, {}, match.params);
}
}
newPath = newPath.concat((0, _utils.buildLookupPath)(match.route));
newPath = _toConsumableArray(new Set(newPath)); // remove duplicates
var currentRoute = (0, _utils.buildLookupPath)(store.currentRoute); // add routes from previous path for onExit event to be triggered
var oldPath = currentRoute.reverse().filter(function (route) {
return route.isActive && !newPath.includes(route);
});
if (!runAllEvents) {
newPath = newPath.filter(function (route, i) {
return route.isActive && currentRoute.includes(route) && route["final"] && i === newPath.length - 1 || !route.isActive || i === newPath.length - 1 && route === store.currentRoute;
});
} // build params
var pathParams = newPath.reduce(function (obj, route) {
return _objectSpread({}, route.defaultParams, {}, obj);
}, match.params);
if (newPath.length > 0 && oldPath.length > 0 && newPath[newPath.length - 1].slot !== oldPath[0].slot && oldPath[0].fallbackState !== false) {
var _routeName = oldPath[0].fallbackState.routeName;
var _route = store.routes[_routeName];
var contextOldPath = (0, _utils.buildLookupPath)(_route).reverse().filter(function (route) {
return route.isActive;
});
oldPath = oldPath.concat(contextOldPath);
} // build fns
var fns = _utils.buildFnsArray.apply(void 0, _toConsumableArray(getPropValuesFromArray(oldPath, 'onExit')));
var _loop = function _loop(i) {
var route = newPath[i];
fns = fns.concat((0, _utils.buildFnsArray)(route.authCallback, route.beforeEnter, runAllEvents && route.isActive && newPath.length - 1 !== i || function (params, rootStore) {
return void store.onMatch(params, rootStore, route);
}));
};
for (var i = 0 | _arrayWithoutHoles | identifier_name |
|
start-router.js | ) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// @see https://stackoverflow.com/questions/21485545/is-there-a-way-to-tell-if-an-es6-promise-is-fulfilled-rejected-resolved#21489870
function makeQuerablePromise(promise) {
// Don't create a wrapper for promises that can already be queried.
if (promise.isResolved) return promise;
var isResolved = false;
var isRejected = false; // Observe the promise, saving the fulfillment in a closure scope.
var result = promise.then(function (v) {
isResolved = true;
return v;
}, function (e) {
isRejected = true;
throw e;
});
result.isFulfilled = function () {
return isResolved || isRejected;
};
result.isResolved = function () {
return isResolved;
};
result.isRejected = function () {
return isRejected;
};
return result;
} | _ref$runAllEvents = _ref.runAllEvents,
runAllEvents = _ref$runAllEvents === void 0 ? false : _ref$runAllEvents,
config = _objectWithoutProperties(_ref, ["resources", "runAllEvents"]);
var store = new _routerStore.RouterStore();
typeof rootStore === 'function' ? rootStore = rootStore(store) : rootStore.routerStore = store;
var browserHistory = (0, _history.createBrowserHistory)();
var history = (0, _sync.syncHistoryWithStore)(browserHistory, store);
var _buildRoutesAndViewSl = (0, _utils.buildRoutesAndViewSlots)(views),
routes = _buildRoutesAndViewSl.routes,
currentView = _buildRoutesAndViewSl.currentView;
store.configure(_objectSpread({}, config, {
routes: routes,
currentView: currentView
}));
var getPropValuesFromArray = function getPropValuesFromArray(objArr, prop) {
return objArr.reduce(function (arr, obj) {
arr.push(obj[prop]);
return arr;
}, []);
};
var buildAction = function buildAction(fn) {
var runAction;
if (typeof fn === 'string') {
var path = fn.split('.');
var obj = path[0];
var action = path[1];
if (resources.hasOwnProperty(obj) && typeof resources[obj][action] === 'function') {
runAction = resources[obj][action];
} else {
runAction = function runAction() {
console.error('Resource "', path.join('.'), '" does not exists!');
return Promise.resolve();
};
}
} else if (typeof fn === 'function') {
runAction = fn;
}
return runAction;
};
var apply = function apply(task, params) {
var runAction = buildAction(task);
var result = typeof runAction === 'function' ? runAction(params, rootStore) : null;
return (0, _utils.isPromise)(result) ? result : Promise.resolve(result);
};
function processHistoryCallback(location, action) {
var matchedRoutes = (0, _utils.getObjectKeys)(store.routes).reduce(function (arr, routeName) {
var route = store.routes[routeName];
var keys = route.path.match(location.pathname);
if (keys) {
var params = _objectSpread({}, (0, _utils.buildParamsObject)(keys, route.path.tokens, route.defaultParams), {}, (0, _queryString.parse)(location.search));
arr.push({
route: route,
params: params
});
}
return arr;
}, []); // TODO: if more than one route is matched, what to do?
// if (matchedRoutes.length > 1) {
// }
var match = matchedRoutes.shift(); // TODO: when 404 happens, should we redirect or replace?
// default redirect
if (!match) {
console.error('404 Not Found!');
store.goTo('notFound');
return; // route = store.routes.notFound;
}
store.nextState = {
routeName: match.route.pathname,
params: (0, _mobx.toJS)(match.params)
}; // build new path for matched route
var newPath = [];
if (match.route.fallbackState === null) {
match.route.fallbackState = store.currentRoute ? {
routeName: store.currentRoute.pathname,
params: (0, _mobx.toJS)(store.params)
} : match.route.defaultState;
}
if (match.route.fallbackState) {
var _match$route$fallback = match.route.fallbackState,
routeName = _match$route$fallback.routeName,
params = _match$route$fallback.params;
var route = store.routes[routeName];
if (route) {
newPath = newPath.concat((0, _utils.buildLookupPath)(route));
match.params = _objectSpread({}, params, {}, match.params);
}
}
newPath = newPath.concat((0, _utils.buildLookupPath)(match.route));
newPath = _toConsumableArray(new Set(newPath)); // remove duplicates
var currentRoute = (0, _utils.buildLookupPath)(store.currentRoute); // add routes from previous path for onExit event to be triggered
var oldPath = currentRoute.reverse().filter(function (route) {
return route.isActive && !newPath.includes(route);
});
if (!runAllEvents) {
newPath = newPath.filter(function (route, i) {
return route.isActive && currentRoute.includes(route) && route["final"] && i === newPath.length - 1 || !route.isActive || i === newPath.length - 1 && route === store.currentRoute;
});
} // build params
var pathParams = newPath.reduce(function (obj, route) {
return _objectSpread({}, route.defaultParams, {}, obj);
}, match.params);
if (newPath.length > 0 && oldPath.length > 0 && newPath[newPath.length - 1].slot !== oldPath[0].slot && oldPath[0].fallbackState !== false) {
var _routeName = oldPath[0].fallbackState.routeName;
var _route = store.routes[_routeName];
var contextOldPath = (0, _utils.buildLookupPath)(_route).reverse().filter(function (route) {
return route.isActive;
});
oldPath = oldPath.concat(contextOldPath);
} // build fns
var fns = _utils.buildFnsArray.apply(void 0, _toConsumableArray(getPropValuesFromArray(oldPath, 'onExit')));
var _loop = function _loop(i) {
var route = newPath[i];
fns = fns.concat((0, _utils.buildFnsArray)(route.authCallback, route.beforeEnter, runAllEvents && route.isActive && newPath.length - 1 !== i || function (params, rootStore) {
return void store.onMatch(params, rootStore, route);
}));
};
for (var i = 0; i < newPath |
var startRouter = function startRouter(views, rootStore) {
var _ref = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var resources = _ref.resources, | random_line_split |
index.ts | (device));
return { inEndpoint, outEndpoint };
};
const initializeRNDIS = (device: usb.Device): usb.InEndpoint => {
const interfaceNumber = 0;
const iface0 = device.interface(interfaceNumber);
iface0.claim();
const iEndpoint = iface0.endpoints[0];
if (!(iEndpoint instanceof usb.InEndpoint)) {
		throw new Error('endpoint is not an usb.InEndpoint');
} else {
iEndpoint.startPoll(1, 256);
}
const CONTROL_BUFFER_SIZE = 1025;
const message = new Message();
const initMsg = message.getRNDISInit(); // RNDIS INIT Message
// Windows Control Transfer
// https://msdn.microsoft.com/en-us/library/aa447434.aspx
// http://www.beyondlogic.org/usbnutshell/usb6.shtml
const bmRequestTypeSend = 0x21; // USB_TYPE=CLASS | USB_RECIPIENT=INTERFACE
const bmRequestTypeReceive = 0xa1; // USB_DATA=DeviceToHost | USB_TYPE=CLASS | USB_RECIPIENT=INTERFACE
// Sending rndis_init_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, initMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
const setMsg = message.getRNDISSet(); // RNDIS SET Message
// Send rndis_set_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, setMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
return iEndpoint;
};
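// UsbBBbootScanner watches USB for BeagleBones in ROM/SPL boot mode and walks each one
// through the usb-boot handshake (RNDIS setup on win32/darwin, then BOOTP/ARP/TFTP)
// until the board re-enumerates as a mass-storage device. It emits 'ready', 'attach'
// and 'detach' events. This summary is inferred from the class body below.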
export class UsbBBbootScanner extends EventEmitter {
private usbBBbootDevices = new Map<string, UsbBBbootDevice>();
private boundAttachDevice: (device: usb.Device) => Promise<void>;
private boundDetachDevice: (device: usb.Device) => void;
private interval: number | undefined;
private stepCounter: number = 0;
// We use both events ('attach' and 'detach') and polling getDeviceList() on usb.
// We don't know which one will trigger the this.attachDevice call.
// So we keep track of attached devices ids in attachedDeviceIds to not run it twice.
private attachedDeviceIds = new Set<string>();
constructor() {
super();
this.boundAttachDevice = this.attachDevice.bind(this);
this.boundDetachDevice = this.detachDevice.bind(this);
}
public start(): void {
debug('Waiting for BeagleBone');
// Prepare already connected devices
usb.getDeviceList().map(this.boundAttachDevice);
		// At this point all devices from `usb.getDeviceList()` above
		// have had an 'attach' event emitted if they were a BeagleBone.
this.emit('ready');
// Watch for new devices being plugged in and prepare them
usb.on('attach', this.boundAttachDevice);
// Watch for devices detaching
usb.on('detach', this.boundDetachDevice);
// ts-ignore because of a confusion between NodeJS.Timer and number
// @ts-ignore
this.interval = setInterval(() => {
usb.getDeviceList().forEach(this.boundAttachDevice);
}, POLLING_INTERVAL_MS);
}
public stop(): void {
usb.removeListener('attach', this.boundAttachDevice);
usb.removeListener('detach', this.boundDetachDevice);
clearInterval(this.interval);
this.usbBBbootDevices.clear();
}
private step(device: usb.Device, step: number): void {
const usbBBbootDevice = this.getOrCreate(device);
usbBBbootDevice.step = step;
if (step === UsbBBbootDevice.LAST_STEP) {
this.remove(device);
}
}
private get(device: usb.Device): UsbBBbootDevice | undefined {
const key = devicePortId(device);
return this.usbBBbootDevices.get(key);
}
private getOrCreate(device: usb.Device): UsbBBbootDevice {
const key = devicePortId(device);
let usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice === undefined) {
usbBBbootDevice = new UsbBBbootDevice(key);
this.usbBBbootDevices.set(key, usbBBbootDevice);
this.emit('attach', usbBBbootDevice);
}
return usbBBbootDevice;
}
private remove(device: usb.Device): void {
const key = devicePortId(device);
const usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice !== undefined) {
this.usbBBbootDevices.delete(key);
this.emit('detach', usbBBbootDevice);
}
}
private async attachDevice(device: usb.Device): Promise<void> {
if (this.attachedDeviceIds.has(getDeviceId(device))) {
return;
}
this.attachedDeviceIds.add(getDeviceId(device));
if (isBeagleBoneInMassStorageMode(device)) {
this.step(device, UsbBBbootDevice.LAST_STEP);
return;
}
if (!isUsbBootCapableUSBDevice$(device)) {
return;
}
if (device.deviceDescriptor.iSerialNumber !== 0) {
return;
}
if (isROMUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
this.stepCounter = 0;
this.process(device, 'u-boot-spl.bin');
}
if (isSPLUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
setTimeout(() => {
this.process(device, 'u-boot.img');
}, 500);
}
}
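	// process() drives one boot stage: it opens the device, performs the RNDIS setup on
	// Windows/macOS, then answers the board's BOOTP, ARP and TFTP requests read from the
	// in-endpoint, serving `fileName` (u-boot-spl.bin or u-boot.img) until the transfer
	// finishes and the device is closed. This is a summary of the code below.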
private process(device: usb.Device, fileName: string): void {
try {
device.open();
let rndisInEndpoint: usb.InEndpoint;
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint = initializeRNDIS(device);
rndisInEndpoint.on('error', (error: any) => {
debug('RNDIS InEndpoint Error', error);
});
}
const { inEndpoint, outEndpoint } = initializeDevice(device);
let serverConfig: any = {};
serverConfig.bootpFile = fileName;
inEndpoint.startPoll(1, 500); // MAXBUFF
inEndpoint.on('error', (error: any) => {
debug('InEndpoint Error', error);
});
inEndpoint.on('data', (data: any) => {
const message = new Message();
const request = message.identify(data);
switch (request) {
case 'unidentified':
break;
case 'BOOTP':
const { bootPBuff, bootPServerConfig } = message.getBOOTPResponse(data, serverConfig);
serverConfig = bootPServerConfig;
this.transfer(device, outEndpoint, request, bootPBuff, this.stepCounter++);
break;
case 'ARP':
const { arpBuff, arpServerConfig } = message.getARResponse(data, serverConfig);
serverConfig = arpServerConfig;
this.transfer(device, outEndpoint, request, arpBuff, this.stepCounter++);
break;
case 'TFTP':
serverConfig = message.getBootFile(data, serverConfig);
if (!serverConfig.tftp.fileError) {
// tslint:disable-next-line
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
this.transfer(device, outEndpoint, request, message.getTFTPError(serverConfig), this.stepCounter);
}
break;
case 'TFTP_Data':
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
if (serverConfig.tftp) {
							if (serverConfig.tftp.blocks <= serverConfig.tftp.blocks) { // NOTE: compares the field with itself (always true); presumably the current block index was meant to be compared against the total block count
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint.stopPoll();
}
inEndpoint.stopPoll();
device.close();
}
}
break;
default:
debug('Request', request);
}
});
} catch (error) {
debug('error', error, devicePortId(device));
this.remove(device);
}
}
private | transfer | identifier_name |
|
index.ts | ENT=INTERFACE
const bmRequestTypeReceive = 0xa1; // USB_DATA=DeviceToHost | USB_TYPE=CLASS | USB_RECIPIENT=INTERFACE
// Sending rndis_init_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, initMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
const setMsg = message.getRNDISSet(); // RNDIS SET Message
// Send rndis_set_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, setMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
return iEndpoint;
};
export class UsbBBbootScanner extends EventEmitter {
private usbBBbootDevices = new Map<string, UsbBBbootDevice>();
private boundAttachDevice: (device: usb.Device) => Promise<void>;
private boundDetachDevice: (device: usb.Device) => void;
private interval: number | undefined;
private stepCounter: number = 0;
// We use both events ('attach' and 'detach') and polling getDeviceList() on usb.
// We don't know which one will trigger the this.attachDevice call.
// So we keep track of attached devices ids in attachedDeviceIds to not run it twice.
private attachedDeviceIds = new Set<string>();
constructor() {
super();
this.boundAttachDevice = this.attachDevice.bind(this);
this.boundDetachDevice = this.detachDevice.bind(this);
}
public start(): void {
debug('Waiting for BeagleBone');
// Prepare already connected devices
usb.getDeviceList().map(this.boundAttachDevice);
		// At this point all devices from `usb.getDeviceList()` above
		// have had an 'attach' event emitted if they were a BeagleBone.
this.emit('ready');
// Watch for new devices being plugged in and prepare them
usb.on('attach', this.boundAttachDevice);
// Watch for devices detaching
usb.on('detach', this.boundDetachDevice);
// ts-ignore because of a confusion between NodeJS.Timer and number
// @ts-ignore
this.interval = setInterval(() => {
usb.getDeviceList().forEach(this.boundAttachDevice);
}, POLLING_INTERVAL_MS);
}
public stop(): void {
usb.removeListener('attach', this.boundAttachDevice);
usb.removeListener('detach', this.boundDetachDevice);
clearInterval(this.interval);
this.usbBBbootDevices.clear();
}
private step(device: usb.Device, step: number): void {
const usbBBbootDevice = this.getOrCreate(device);
usbBBbootDevice.step = step;
if (step === UsbBBbootDevice.LAST_STEP) {
this.remove(device);
}
}
private get(device: usb.Device): UsbBBbootDevice | undefined {
const key = devicePortId(device);
return this.usbBBbootDevices.get(key);
}
private getOrCreate(device: usb.Device): UsbBBbootDevice {
const key = devicePortId(device);
let usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice === undefined) {
usbBBbootDevice = new UsbBBbootDevice(key);
this.usbBBbootDevices.set(key, usbBBbootDevice);
this.emit('attach', usbBBbootDevice);
}
return usbBBbootDevice;
}
private remove(device: usb.Device): void {
const key = devicePortId(device);
const usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice !== undefined) {
this.usbBBbootDevices.delete(key);
this.emit('detach', usbBBbootDevice);
}
}
private async attachDevice(device: usb.Device): Promise<void> {
if (this.attachedDeviceIds.has(getDeviceId(device))) {
return;
}
this.attachedDeviceIds.add(getDeviceId(device));
if (isBeagleBoneInMassStorageMode(device)) {
this.step(device, UsbBBbootDevice.LAST_STEP);
return;
}
if (!isUsbBootCapableUSBDevice$(device)) {
return;
}
if (device.deviceDescriptor.iSerialNumber !== 0) {
return;
}
if (isROMUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
this.stepCounter = 0;
this.process(device, 'u-boot-spl.bin');
}
if (isSPLUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
setTimeout(() => {
this.process(device, 'u-boot.img');
}, 500);
}
}
private process(device: usb.Device, fileName: string): void {
try {
device.open();
let rndisInEndpoint: usb.InEndpoint;
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint = initializeRNDIS(device);
rndisInEndpoint.on('error', (error: any) => {
debug('RNDIS InEndpoint Error', error);
});
}
const { inEndpoint, outEndpoint } = initializeDevice(device);
let serverConfig: any = {};
serverConfig.bootpFile = fileName;
inEndpoint.startPoll(1, 500); // MAXBUFF
inEndpoint.on('error', (error: any) => {
debug('InEndpoint Error', error);
});
inEndpoint.on('data', (data: any) => {
const message = new Message();
const request = message.identify(data);
switch (request) {
case 'unidentified':
break;
case 'BOOTP':
const { bootPBuff, bootPServerConfig } = message.getBOOTPResponse(data, serverConfig);
serverConfig = bootPServerConfig;
this.transfer(device, outEndpoint, request, bootPBuff, this.stepCounter++);
break;
case 'ARP':
const { arpBuff, arpServerConfig } = message.getARResponse(data, serverConfig);
serverConfig = arpServerConfig;
this.transfer(device, outEndpoint, request, arpBuff, this.stepCounter++);
break;
case 'TFTP':
serverConfig = message.getBootFile(data, serverConfig);
if (!serverConfig.tftp.fileError) {
// tslint:disable-next-line
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
this.transfer(device, outEndpoint, request, message.getTFTPError(serverConfig), this.stepCounter);
}
break;
case 'TFTP_Data':
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
if (serverConfig.tftp) {
							if (serverConfig.tftp.blocks <= serverConfig.tftp.blocks) { // NOTE: compares the field with itself (always true); presumably the current block index was meant to be compared against the total block count
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint.stopPoll();
}
inEndpoint.stopPoll();
device.close();
}
}
break;
default:
debug('Request', request);
}
});
} catch (error) {
debug('error', error, devicePortId(device));
this.remove(device);
}
}
private transfer(
device: usb.Device,
outEndpoint: usb.OutEndpoint,
request: any,
response: any,
step: number,
): Promise<any> {
return new Promise((resolve, reject) => {
outEndpoint.transfer(response, (cb: any) => {
if (!cb) {
if (request === 'BOOTP') {
this.step(device, step);
}
if (request === 'ARP') {
this.step(device, step);
}
if (request === 'TFTP') {
this.step(device, step);
}
if (request === 'TFTP_Data') {
this.step(device, step);
}
} else {
debug('Out transfer Error', cb);
reject(cb);
}
});
resolve(true);
});
}
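	// detachDevice clears the "already attached" bookkeeping and, after a short grace
	// period (DEVICE_UNPLUG_TIMEOUT elsewhere in this class), drops the device entry if
	// the board has not reached the final step — e.g. when it is unplugged mid-flash.
	// Summary of the method below.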
private detachDevice(device: usb.Device): void {
this.attachedDeviceIds.delete(getDeviceId(device));
if (!isUsbBootCapableUSBDevice$(device)) | {
return;
} | conditional_block |
|
index.ts | throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
const setMsg = message.getRNDISSet(); // RNDIS SET Message
// Send rndis_set_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, setMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
return iEndpoint;
};
export class UsbBBbootScanner extends EventEmitter {
private usbBBbootDevices = new Map<string, UsbBBbootDevice>();
private boundAttachDevice: (device: usb.Device) => Promise<void>;
private boundDetachDevice: (device: usb.Device) => void;
private interval: number | undefined;
private stepCounter: number = 0;
// We use both events ('attach' and 'detach') and polling getDeviceList() on usb.
// We don't know which one will trigger the this.attachDevice call.
// So we keep track of attached devices ids in attachedDeviceIds to not run it twice.
private attachedDeviceIds = new Set<string>();
constructor() {
super();
this.boundAttachDevice = this.attachDevice.bind(this);
this.boundDetachDevice = this.detachDevice.bind(this);
}
public start(): void {
debug('Waiting for BeagleBone');
// Prepare already connected devices
usb.getDeviceList().map(this.boundAttachDevice);
		// At this point all devices from `usb.getDeviceList()` above
		// have had an 'attach' event emitted if they were a BeagleBone.
this.emit('ready');
// Watch for new devices being plugged in and prepare them
usb.on('attach', this.boundAttachDevice);
// Watch for devices detaching
usb.on('detach', this.boundDetachDevice);
// ts-ignore because of a confusion between NodeJS.Timer and number
// @ts-ignore
this.interval = setInterval(() => {
usb.getDeviceList().forEach(this.boundAttachDevice);
}, POLLING_INTERVAL_MS);
}
public stop(): void {
usb.removeListener('attach', this.boundAttachDevice);
usb.removeListener('detach', this.boundDetachDevice);
clearInterval(this.interval);
this.usbBBbootDevices.clear();
}
private step(device: usb.Device, step: number): void {
const usbBBbootDevice = this.getOrCreate(device);
usbBBbootDevice.step = step;
if (step === UsbBBbootDevice.LAST_STEP) {
this.remove(device);
}
}
private get(device: usb.Device): UsbBBbootDevice | undefined {
const key = devicePortId(device);
return this.usbBBbootDevices.get(key);
}
private getOrCreate(device: usb.Device): UsbBBbootDevice {
const key = devicePortId(device);
let usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice === undefined) {
usbBBbootDevice = new UsbBBbootDevice(key);
this.usbBBbootDevices.set(key, usbBBbootDevice);
this.emit('attach', usbBBbootDevice);
}
return usbBBbootDevice;
}
private remove(device: usb.Device): void {
const key = devicePortId(device);
const usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice !== undefined) {
this.usbBBbootDevices.delete(key);
this.emit('detach', usbBBbootDevice);
}
}
private async attachDevice(device: usb.Device): Promise<void> {
if (this.attachedDeviceIds.has(getDeviceId(device))) {
return;
}
this.attachedDeviceIds.add(getDeviceId(device));
if (isBeagleBoneInMassStorageMode(device)) {
this.step(device, UsbBBbootDevice.LAST_STEP);
return;
}
if (!isUsbBootCapableUSBDevice$(device)) {
return;
}
if (device.deviceDescriptor.iSerialNumber !== 0) {
return;
}
if (isROMUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
this.stepCounter = 0;
this.process(device, 'u-boot-spl.bin');
}
if (isSPLUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
setTimeout(() => {
this.process(device, 'u-boot.img');
}, 500);
}
}
private process(device: usb.Device, fileName: string): void {
try {
device.open();
let rndisInEndpoint: usb.InEndpoint;
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint = initializeRNDIS(device);
rndisInEndpoint.on('error', (error: any) => {
debug('RNDIS InEndpoint Error', error);
});
}
const { inEndpoint, outEndpoint } = initializeDevice(device);
let serverConfig: any = {};
serverConfig.bootpFile = fileName;
inEndpoint.startPoll(1, 500); // MAXBUFF
inEndpoint.on('error', (error: any) => {
debug('InEndpoint Error', error);
});
inEndpoint.on('data', (data: any) => {
const message = new Message();
const request = message.identify(data);
switch (request) {
case 'unidentified':
break;
case 'BOOTP':
const { bootPBuff, bootPServerConfig } = message.getBOOTPResponse(data, serverConfig);
serverConfig = bootPServerConfig;
this.transfer(device, outEndpoint, request, bootPBuff, this.stepCounter++);
break;
case 'ARP':
const { arpBuff, arpServerConfig } = message.getARResponse(data, serverConfig);
serverConfig = arpServerConfig;
this.transfer(device, outEndpoint, request, arpBuff, this.stepCounter++);
break;
case 'TFTP':
serverConfig = message.getBootFile(data, serverConfig);
if (!serverConfig.tftp.fileError) {
// tslint:disable-next-line
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
this.transfer(device, outEndpoint, request, message.getTFTPError(serverConfig), this.stepCounter);
}
break;
case 'TFTP_Data':
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
if (serverConfig.tftp) {
							if (serverConfig.tftp.blocks <= serverConfig.tftp.blocks) { // NOTE: compares the field with itself (always true); presumably the current block index was meant to be compared against the total block count
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint.stopPoll();
}
inEndpoint.stopPoll();
device.close();
}
}
break;
default:
debug('Request', request);
}
});
} catch (error) {
debug('error', error, devicePortId(device));
this.remove(device);
}
}
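	// transfer() writes a prepared response buffer to the out-endpoint and, when the write
	// succeeds, bumps the per-device progress step for the request type that produced it;
	// failures are logged and rejected. Note that the promise resolves right after the
	// transfer is queued rather than inside the completion callback — summary of the code below.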
private transfer(
device: usb.Device,
outEndpoint: usb.OutEndpoint,
request: any,
response: any,
step: number,
): Promise<any> {
return new Promise((resolve, reject) => {
outEndpoint.transfer(response, (cb: any) => {
if (!cb) {
if (request === 'BOOTP') {
this.step(device, step);
}
if (request === 'ARP') {
this.step(device, step);
}
if (request === 'TFTP') {
this.step(device, step);
}
if (request === 'TFTP_Data') {
this.step(device, step);
}
} else {
debug('Out transfer Error', cb);
reject(cb);
}
});
resolve(true);
});
}
private detachDevice(device: usb.Device): void | {
this.attachedDeviceIds.delete(getDeviceId(device));
if (!isUsbBootCapableUSBDevice$(device)) {
return;
}
setTimeout(() => {
const usbBBbootDevice = this.get(device);
if (usbBBbootDevice !== undefined && usbBBbootDevice.step === UsbBBbootDevice.LAST_STEP) {
			debug('device', devicePortId(device), 'did not reattach after', DEVICE_UNPLUG_TIMEOUT, 'ms.');
this.remove(device);
}
}, DEVICE_UNPLUG_TIMEOUT);
} | identifier_body |
|
index.ts | }
}
iface.claim();
const inEndpoint = iface.endpoints[0];
const outEndpoint = iface.endpoints[1];
if (!(inEndpoint instanceof usb.InEndpoint)) {
		throw new Error('endpoint is not an usb.InEndpoint');
}
if (!(outEndpoint instanceof usb.OutEndpoint)) {
throw new Error('endpoint is not an usb.OutEndpoint');
}
debug('Initialized device correctly', devicePortId(device));
return { inEndpoint, outEndpoint };
};
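// initializeRNDIS is only needed on Windows/macOS: it claims interface 0, starts polling
// its in-endpoint and performs the RNDIS INIT/SET handshake via control transfers
// (SEND_ENCAPSULATED_COMMAND / GET_ENCAPSULATED_RESPONSE) so the host exposes the board
// as a network device. Summary of the function below; see the linked MSDN/beyondlogic notes.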
const initializeRNDIS = (device: usb.Device): usb.InEndpoint => {
const interfaceNumber = 0;
const iface0 = device.interface(interfaceNumber);
iface0.claim();
const iEndpoint = iface0.endpoints[0];
if (!(iEndpoint instanceof usb.InEndpoint)) {
		throw new Error('endpoint is not an usb.InEndpoint');
} else {
iEndpoint.startPoll(1, 256);
}
const CONTROL_BUFFER_SIZE = 1025;
const message = new Message();
const initMsg = message.getRNDISInit(); // RNDIS INIT Message
// Windows Control Transfer
// https://msdn.microsoft.com/en-us/library/aa447434.aspx
// http://www.beyondlogic.org/usbnutshell/usb6.shtml
const bmRequestTypeSend = 0x21; // USB_TYPE=CLASS | USB_RECIPIENT=INTERFACE
const bmRequestTypeReceive = 0xa1; // USB_DATA=DeviceToHost | USB_TYPE=CLASS | USB_RECIPIENT=INTERFACE
// Sending rndis_init_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, initMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
const setMsg = message.getRNDISSet(); // RNDIS SET Message
// Send rndis_set_msg (SEND_ENCAPSULATED_COMMAND)
device.controlTransfer(bmRequestTypeSend, 0, 0, 0, setMsg, error => {
if (error) {
throw new Error(`Control transfer error on SEND_ENCAPSULATED ${error}`);
}
});
// Receive rndis_init_cmplt (GET_ENCAPSULATED_RESPONSE)
device.controlTransfer(bmRequestTypeReceive, 0x01, 0, 0, CONTROL_BUFFER_SIZE, error => {
if (error) {
throw new Error(`Control transfer error on GET_ENCAPSULATED ${error}`);
}
});
return iEndpoint;
};
export class UsbBBbootScanner extends EventEmitter {
private usbBBbootDevices = new Map<string, UsbBBbootDevice>();
private boundAttachDevice: (device: usb.Device) => Promise<void>;
private boundDetachDevice: (device: usb.Device) => void;
private interval: number | undefined;
private stepCounter: number = 0;
// We use both events ('attach' and 'detach') and polling getDeviceList() on usb.
// We don't know which one will trigger the this.attachDevice call.
// So we keep track of attached devices ids in attachedDeviceIds to not run it twice.
private attachedDeviceIds = new Set<string>();
constructor() {
super();
this.boundAttachDevice = this.attachDevice.bind(this);
this.boundDetachDevice = this.detachDevice.bind(this);
}
public start(): void {
debug('Waiting for BeagleBone');
// Prepare already connected devices
usb.getDeviceList().map(this.boundAttachDevice);
		// At this point all devices from `usb.getDeviceList()` above
		// have had an 'attach' event emitted if they were a BeagleBone.
this.emit('ready');
// Watch for new devices being plugged in and prepare them
usb.on('attach', this.boundAttachDevice);
// Watch for devices detaching
usb.on('detach', this.boundDetachDevice);
// ts-ignore because of a confusion between NodeJS.Timer and number
// @ts-ignore
this.interval = setInterval(() => {
usb.getDeviceList().forEach(this.boundAttachDevice);
}, POLLING_INTERVAL_MS);
}
public stop(): void {
usb.removeListener('attach', this.boundAttachDevice);
usb.removeListener('detach', this.boundDetachDevice);
clearInterval(this.interval);
this.usbBBbootDevices.clear();
}
private step(device: usb.Device, step: number): void {
const usbBBbootDevice = this.getOrCreate(device);
usbBBbootDevice.step = step;
if (step === UsbBBbootDevice.LAST_STEP) {
this.remove(device);
}
}
private get(device: usb.Device): UsbBBbootDevice | undefined {
const key = devicePortId(device);
return this.usbBBbootDevices.get(key);
}
private getOrCreate(device: usb.Device): UsbBBbootDevice {
const key = devicePortId(device);
let usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice === undefined) {
usbBBbootDevice = new UsbBBbootDevice(key);
this.usbBBbootDevices.set(key, usbBBbootDevice);
this.emit('attach', usbBBbootDevice);
}
return usbBBbootDevice;
}
private remove(device: usb.Device): void {
const key = devicePortId(device);
const usbBBbootDevice = this.usbBBbootDevices.get(key);
if (usbBBbootDevice !== undefined) {
this.usbBBbootDevices.delete(key);
this.emit('detach', usbBBbootDevice);
}
}
private async attachDevice(device: usb.Device): Promise<void> {
if (this.attachedDeviceIds.has(getDeviceId(device))) {
return;
}
this.attachedDeviceIds.add(getDeviceId(device));
if (isBeagleBoneInMassStorageMode(device)) {
this.step(device, UsbBBbootDevice.LAST_STEP);
return;
}
if (!isUsbBootCapableUSBDevice$(device)) {
return;
}
if (device.deviceDescriptor.iSerialNumber !== 0) {
return;
}
if (isROMUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
this.stepCounter = 0;
this.process(device, 'u-boot-spl.bin');
}
if (isSPLUSBDevice(device.deviceDescriptor.idVendor, device.deviceDescriptor.idProduct)) {
setTimeout(() => {
this.process(device, 'u-boot.img');
}, 500);
}
}
private process(device: usb.Device, fileName: string): void {
try {
device.open();
let rndisInEndpoint: usb.InEndpoint;
if (platform === 'win32' || platform === 'darwin') {
rndisInEndpoint = initializeRNDIS(device);
rndisInEndpoint.on('error', (error: any) => {
debug('RNDIS InEndpoint Error', error);
});
}
const { inEndpoint, outEndpoint } = initializeDevice(device);
let serverConfig: any = {};
serverConfig.bootpFile = fileName;
inEndpoint.startPoll(1, 500); // MAXBUFF
inEndpoint.on('error', (error: any) => {
debug('InEndpoint Error', error);
});
inEndpoint.on('data', (data: any) => {
const message = new Message();
const request = message.identify(data);
switch (request) {
case 'unidentified':
break;
case 'BOOTP':
const { bootPBuff, bootPServerConfig } = message.getBOOTPResponse(data, serverConfig);
serverConfig = bootPServerConfig;
this.transfer(device, outEndpoint, request, bootPBuff, this.stepCounter++);
break;
case 'ARP':
const { arpBuff, arpServerConfig } = message.getARResponse(data, serverConfig);
serverConfig = arpServerConfig;
this.transfer(device, outEndpoint, request, arpBuff, this.stepCounter++);
break;
case 'TFTP':
serverConfig = message.getBootFile(data, serverConfig);
if (!serverConfig.tftp.fileError) {
// tslint:disable-next-line
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig;
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++);
} else {
this.transfer(device, outEndpoint, request, message.getTFTPError(serverConfig), this.stepCounter);
}
break;
case 'TFTP_Data':
const { tftpBuff, tftpServerConfig } = message.getTFTPData(serverConfig);
serverConfig = tftpServerConfig; | if (serverConfig.tftp) {
						if (serverConfig.tftp.blocks <= serverConfig.tftp.blocks) { // NOTE: compares the field with itself (always true); presumably the current block index was meant to be compared against the total block count
this.transfer(device, outEndpoint, request, tftpBuff, this.stepCounter++); | random_line_split |
|
sudoku.go | []line
m_sets []Solver
m_cells [][]cell
}
func New(puzzle [COL_LENGTH][ROW_LENGTH]int) (*Grid, error){
var g Grid
g.Init();
g.Fill(puzzle)
return &g,nil
}
func (g *Grid) validate() bool{
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].validate(){
return false
}
}
}
for i,_ := range g.m_sets{
if !g.m_sets[i].validate(){
return false
}
}
return true
}
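// Init allocates the 9x9 cell backing store and wires up the three overlapping views of
// it (squares, rows and columns); m_sets collects all of them behind the Solver interface
// so later passes can iterate a single slice. Summary of the function below.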
func (g *Grid) Init() {
//Init the raw cells themselves that actually store the grid data
g.m_cells = make([][]cell,COL_LENGTH)
for i :=0; i< len(g.m_cells); i++{
g.m_cells[i] = make([]cell, ROW_LENGTH)
for j :=0; j< len(g.m_cells[i]); j++{
c := &g.m_cells[i][j]
c.init(i,j)
}
}
//Init each of the grouping structures that view portions of the grid
/*
	Squares are indexed into the grid as follows
S0 S1 S2
S3 S4 S5
S6 S7 S8
*/
g.m_squares = make([]square,NUM_SQUARES)
for squareIdx :=0; squareIdx<NUM_SQUARES; squareIdx++{
g.m_squares[squareIdx].init()
for x :=0; x<SQUARE_SIZE; x++{
for y:= 0; y<SQUARE_SIZE; y++{
//is this correct?
gridX := SQUARE_SIZE * (squareIdx % SQUARE_SIZE) + x
gridY := SQUARE_SIZE * (squareIdx / SQUARE_SIZE) + y
cellPtr := &g.m_cells[gridX][gridY]
g.m_squares[squareIdx].m_cells[x][y] = cellPtr
}
}
}
g.m_rows = make([]line, ROW_LENGTH)
g.m_cols = make([]line,COL_LENGTH)
//Make m_sets just a big long list of all the cell grouping structures
//handy for doing iterations over all different ways of looking at the cells
g.m_sets = make([]Solver,len(g.m_squares) + len(g.m_rows) + len(g.m_cols))
var idx int
for i := 0; i<len(g.m_squares); i++{
s:= &g.m_squares[i]
g.m_sets[idx] = s
idx++
}
for i:=0; i<len(g.m_rows); i++{
r:=&g.m_rows[i]
g.m_sets[idx] = r
idx++
r.init()
for colNum:=0; colNum < COL_LENGTH; colNum++{
r.m_cells[colNum] = &g.m_cells[colNum][i]
}
r.m_rowAligned = true
r.m_rowOrColNum = i
}
for i:= 0; i<len(g.m_cols); i++{
c := &g.m_cols[i]
g.m_sets[idx] = c
idx++
c.init()
for rowNum:=0; rowNum < ROW_LENGTH; rowNum++{
c.m_cells[rowNum] = &g.m_cells[i][rowNum]
}
c.m_rowAligned = false
c.m_rowOrColNum = i
}
}
func (g *Grid) Fill(puzzle [COL_LENGTH][ROW_LENGTH]int){
g.Init()
for x:=0; x<COL_LENGTH; x++{
for y:=0; y<ROW_LENGTH; y++{
var puzzVal = puzzle[y][x]
if puzzVal >=1 && puzzVal<=9{
g.m_cells[x][y].SetKnownTo(puzzVal)
}
}
}
}
func (g Grid) Solved() (bool,error) {
for _,s := range g.m_sets{
solved,err := s.Solved()
if err != nil{
fmt.Println("Error during Solved() check on grid: " + err.Error())
return false,err
}
if !solved{
return false,nil
}
}
return true,nil
}
func (g *Grid) squareExclusionReduce() (bool,error){
changed := false
/*for i,_ := range g.m_squares{
s := &g.m_squares[i]
pairs, err := s.AlignedCellOnlyPairs()
if err != nil{
return false,err
}
for _,p := range pairs{
}
}*/
return changed,nil
}
func(g *Grid) reducePossiblePass() (bool, error){
changed := false
for pass:=0;pass<2;pass++{
for i,_ := range g.m_sets{
reduced,err := g.m_sets[i].reducePossible()
if err != nil{
return false,err
}
changed = changed || reduced
}
sqReduce,err := g.squareExclusionReduce()
if err != nil{
return false,err
}
changed = changed || sqReduce
}
return changed,nil
}
func (g *Grid) Puzzle() [COL_LENGTH][ROW_LENGTH]int{
var puzzle [COL_LENGTH][ROW_LENGTH]int
for x,_ := range puzzle{
for y,_ := range puzzle[x]{
if g.m_cells[x][y].IsKnown(){
var err error
puzzle[y][x],err = g.m_cells[x][y].Known()
if err != nil{
return puzzle
}
}
}
}
return puzzle
}
func (g *Grid) setKnown( x,y, known int) error{
//should probably check if grid is initialised and return error if it isn't
g.m_cells[x][y].SetKnownTo(known)
return nil
}
func (g *Grid) DuplicateGrid() (*Grid,error){
return New(g.Puzzle())
}
func (g* Grid) TotalPossible() (int, error){
totalPoss := 0
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].IsKnown(){
val,err := g.m_cells[x][y].Possibles()
if err != nil{
return 0,err
}
numPoss := len(val)
totalPoss += numPoss
}
}
}
return totalPoss,nil
}
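// GenerateGuessGrids picks the unknown cell with the fewest remaining candidates and
// returns one copied grid per candidate value, each with that guess filled in. A rough
// usage sketch (hypothetical caller, not part of this file):
//
//	guesses, err := g.GenerateGuessGrids()
//	for _, guess := range guesses {
//		if res, err := guess.Solve(); err == nil && res.Solved() { /* use res.Grid() */ }
//	}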
func (g *Grid) GenerateGuessGrids() ([]*Grid, error){
totalPoss,err := g.TotalPossible()
guesses := make([]*Grid,0,totalPoss)
if err != nil{
return guesses,err
}
smallPoss := 99
smX:=0
smY:=0
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].IsKnown(){
possibles,err := g.m_cells[x][y].Possibles()
if err!=nil{
return guesses,err
}
if len(possibles) < smallPoss{
smallPoss=len(possibles)
smX=x
smY=y
}
}
}
}
if smallPoss < 99{
possibles,err := g.m_cells[smX][smY].Possibles()
if err!=nil{
return guesses,err
}
for _,v := range possibles{
guess,err := g.DuplicateGrid()
if err!=nil{
return guesses,err
}
err = guess.setKnown(smX,smY,v)
if err!=nil{
return guesses,err
}
guesses = append(guesses,guess)
}
}
return guesses,nil
}
type SolveResult struct{
m_grid *Grid
m_solved bool
}
func (s *SolveResult) Grid() *Grid{
return s.m_grid
}
func (s *SolveResult) Solved() bool{
return s.m_solved
}
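// startSolveRoutine runs Solve on a guess grid in its own goroutine and pushes the result
// into ch; errors are swallowed here because an invalid guess is expected to fail. Solve
// itself (below) alternates constraint-propagation passes with validation, and presumably
// falls back to these guess goroutines when propagation alone cannot finish the puzzle.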
func startSolveRoutine(ch chan SolveResult, g *Grid) {
defer close(ch)
res, err := g.Solve()
if err != nil{
//this error might be expected, we might have sent in an invalid puzzle
//only care about this response to print or pass on in the root call to solve.
return
}
ch<-*res
}
func (g *Grid) Solve() (*SolveResult,error) | {
var err error
for changed:=true; changed;{
changed, err = g.reducePossiblePass()
if err != nil{
return &SolveResult{nil,false},err
}
if !g.validate(){
return &SolveResult{nil,false},nil //this was probably an invalid guess, just want to stop trying to process this
}
}
solved,err := g.Solved()
if err != nil{
return &SolveResult{nil,false},err
}
if solved{
return &SolveResult{g,true},nil | identifier_body |
|
sudoku.go | }
func (c cell) String() string {
val,err := c.Known()
if(err != nil){
return "x"
}
return strconv.Itoa(val)
}
type cellPtrSlice []*cell
func (cells cellPtrSlice) Solved() (bool, error){
for _,c := range cells{
if !c.IsKnown(){
return false,nil
}
}
return true,nil
}
func (cells cellPtrSlice) Known() ([]int, error){
known := make([]int,0, len(cells))
for _,c := range cells{
if c.IsKnown(){
val,err := c.Known()
if err != nil{
return known,err
}
known = append(known,val)
}
}
return known,nil
}
func (cells cellPtrSlice) TakeKnownFromPossible(known []int) (bool,error){
changed := false
for _,c := range cells{
taken, err := c.TakeKnownFromPossible(known)
if err != nil{
return false,err
}
changed = changed || taken
}
return changed,nil
}
//Squares which represent one of each of the 9 squares in a Grid, each of which
//references a 3x3 collection of cells.
type square struct {
m_cells [][]*cell
}
func (s square) Solved() (bool,error) {
for _,r := range s.m_cells{
solved,err := cellPtrSlice(r).Solved()
if(err != nil){
return false,err
}
if !solved {
return false,nil
}
}
return true,nil
}
func (s *square) init() {
s.m_cells = make([][]*cell,SQUARE_SIZE)
for i,_ := range s.m_cells{
s.m_cells[i] = make(cellPtrSlice, SQUARE_SIZE)
}
}
func (s* square) KnownInSquare() ([]int,error){
known := make([]int,0,SQUARE_SIZE*SQUARE_SIZE)
for x,_ := range s.m_cells{
for y,_ := range s.m_cells[x]{
c := s.m_cells[x][y]
if c.IsKnown(){
val,err := c.Known()
if err != nil{
return known,err
}
known = append(known,val)
}
}
}
return known,nil
}
func (s* square) reducePossible() (bool,error) {
known,err := s.KnownInSquare()
reduced := false
if err != nil {
return false,err
}
for x,_ := range s.m_cells{
cells := s.m_cells[x]
changed, err := cellPtrSlice(cells).TakeKnownFromPossible(known)
if err != nil{
return false,err
}
reduced = reduced || changed
}
return reduced,nil
}
func (s* square) validate() bool{
known,err := s.KnownInSquare()
if err != nil {
return false
}
return validate(known)
}
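// pair records two candidate values confined to a single row or column of a square.
// AlignedCellOnlyPairs below is meant to find such naked pairs so they can be excluded
// from the rest of that row/column, but its body is currently commented out and it
// always returns an empty slice.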
type pair struct{
m_values [2]int
m_rowAligned bool
m_rowOrColNum int
}
func (s* square) AlignedCellOnlyPairs() ([]pair,error){
foundPairs := make([]pair,0,SQUARE_SIZE*SQUARE_SIZE)
/*
for x1,_ := range s.m_cells{
for y1,_ := range s.m_cells[x1]{
c1 := s.m_cells[x1][y1]
if len(c1.m_possible)==2{
for x2,_ := range s.m_cells{
for y2,_ := range s.m_cells[x2]{
c2 := s.m_cells[x2][y2]
if c1 != c2{
if len(c2.m_possible)==2{
if x2 == x1 || y2 == y1{
matchBoth := true
valIdx :=0
var foundPair pair
for k,v := range c2.m_possible{
if val,ok :=c1.m_possible[k]; !ok{
matchBoth = false
}else{
pair.m_values[valIdx] = k
valIdx++
}
}
if matchBoth{
foundPair.m_rowAligned = y2==y1
if foundPair.m_rowAligned{
foundPair.m_rowOrColNum = c1.m_y
}else{
foundPair.m_rowOrColNum = c1.m_x
}
foundPairs = append(foundPairs,foundPair)
}
}
}
}
}
}
}
}
}*/
return foundPairs,nil
}
//A horizontal or vertical line of 9 cells through the entire Grid.
type line struct {
m_cells cellPtrSlice
m_rowAligned bool
m_rowOrColNum int
}
func(l *line) | (){
l.m_cells = make([]*cell,COL_LENGTH,COL_LENGTH)
}
func (l line) Solved() (bool,error) {
return l.m_cells.Solved()
}
func (l* line) reducePossible() (bool,error) {
known,err := l.m_cells.Known()
if err != nil {
return false,err
}
reduced, err := l.m_cells.TakeKnownFromPossible(known)
if err != nil{
return false,err
}
return reduced,nil
}
func (l* line) validate() bool{
known,err := l.m_cells.Known()
if err != nil {
return false
}
return validate(known)
}
func (l line) String() string{
str := ""
for _,c := range l.m_cells{
if c.IsKnown(){
v,err:= c.Known()
if err == nil{
str+=strconv.Itoa(v)
}
}else{
str+="x"
}
}
return str
}
//Grid which represents the 3x3 collection of squares which represent the entire puzzle
const ROW_LENGTH = 9
const COL_LENGTH = 9
const NUM_SQUARES = COL_LENGTH
const SQUARE_SIZE = 3
type Grid struct {
m_squares []square
m_rows []line
m_cols []line
m_sets []Solver
m_cells [][]cell
}
func New(puzzle [COL_LENGTH][ROW_LENGTH]int) (*Grid, error){
var g Grid
g.Init();
g.Fill(puzzle)
return &g,nil
}
func (g *Grid) validate() bool{
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].validate(){
return false
}
}
}
for i,_ := range g.m_sets{
if !g.m_sets[i].validate(){
return false
}
}
return true
}
func (g *Grid) Init() {
//Init the raw cells themselves that actually store the grid data
g.m_cells = make([][]cell,COL_LENGTH)
for i :=0; i< len(g.m_cells); i++{
g.m_cells[i] = make([]cell, ROW_LENGTH)
for j :=0; j< len(g.m_cells[i]); j++{
c := &g.m_cells[i][j]
c.init(i,j)
}
}
//Init each of the grouping structures that view portions of the grid
/*
	Squares are indexed into the grid as follows
S0 S1 S2
S3 S4 S5
S6 S7 S8
*/
g.m_squares = make([]square,NUM_SQUARES)
for squareIdx :=0; squareIdx<NUM_SQUARES; squareIdx++{
g.m_squares[squareIdx].init()
for x :=0; x<SQUARE_SIZE; x++{
for y:= 0; y<SQUARE_SIZE; y++{
//is this correct?
gridX := SQUARE_SIZE * (squareIdx % SQUARE_SIZE) + x
gridY := SQUARE_SIZE * (squareIdx / SQUARE_SIZE) + y
cellPtr := &g.m_cells[gridX][gridY]
g.m_squares[squareIdx].m_cells[x][y] = cellPtr
}
}
}
g.m_rows = make([]line, ROW_LENGTH)
g.m_cols = make([]line,COL_LENGTH)
//Make m_sets just a big long list of all the cell grouping structures
//handy for doing iterations over all different ways of looking at the cells
g.m_sets = make([]Solver,len(g.m_squares) + len(g.m_rows) + len(g.m_cols))
var idx int
for i := 0; i<len(g.m_squares); i++{
s:= &g.m_squares[i]
g.m_sets[idx] = s
idx++
}
for i:=0; i<len(g.m_rows); i++{
r:=&g.m_rows[i]
g.m_sets[idx] = r
idx++
r.init()
for colNum:=0; colNum < COL_LENGTH; colNum++{
r.m_cells[colNum] = &g.m_cells[colNum][i]
}
r.m_rowAligned = true
r.m_rowOrColNum = i
}
for i:= 0 | init | identifier_name |
sudoku.go | represent one of each of the 9 squares in a Grid, each of which
//references a 3x3 collection of cells.
type square struct {
m_cells [][]*cell
}
func (s square) Solved() (bool,error) {
for _,r := range s.m_cells{
solved,err := cellPtrSlice(r).Solved()
if(err != nil){
return false,err
}
if !solved {
return false,nil
}
}
return true,nil
}
func (s *square) init() {
s.m_cells = make([][]*cell,SQUARE_SIZE)
for i,_ := range s.m_cells{
s.m_cells[i] = make(cellPtrSlice, SQUARE_SIZE)
}
}
func (s* square) KnownInSquare() ([]int,error){
known := make([]int,0,SQUARE_SIZE*SQUARE_SIZE)
for x,_ := range s.m_cells{
for y,_ := range s.m_cells[x]{
c := s.m_cells[x][y]
if c.IsKnown(){
val,err := c.Known()
if err != nil{
return known,err
}
known = append(known,val)
}
}
}
return known,nil
}
func (s* square) reducePossible() (bool,error) {
known,err := s.KnownInSquare()
reduced := false
if err != nil {
return false,err
}
for x,_ := range s.m_cells{
cells := s.m_cells[x]
changed, err := cellPtrSlice(cells).TakeKnownFromPossible(known)
if err != nil{
return false,err
}
reduced = reduced || changed
}
return reduced,nil
}
func (s* square) validate() bool{
known,err := s.KnownInSquare()
if err != nil {
return false
}
return validate(known)
}
type pair struct{
m_values [2]int
m_rowAligned bool
m_rowOrColNum int
}
func (s* square) AlignedCellOnlyPairs() ([]pair,error){
foundPairs := make([]pair,0,SQUARE_SIZE*SQUARE_SIZE)
/*
for x1,_ := range s.m_cells{
for y1,_ := range s.m_cells[x1]{
c1 := s.m_cells[x1][y1]
if len(c1.m_possible)==2{
for x2,_ := range s.m_cells{
for y2,_ := range s.m_cells[x2]{
c2 := s.m_cells[x2][y2]
if c1 != c2{
if len(c2.m_possible)==2{
if x2 == x1 || y2 == y1{
matchBoth := true
valIdx :=0
var foundPair pair
for k,v := range c2.m_possible{
if val,ok :=c1.m_possible[k]; !ok{
matchBoth = false
}else{
pair.m_values[valIdx] = k
valIdx++
}
}
if matchBoth{
foundPair.m_rowAligned = y2==y1
if foundPair.m_rowAligned{
foundPair.m_rowOrColNum = c1.m_y
}else{
foundPair.m_rowOrColNum = c1.m_x
}
foundPairs = append(foundPairs,foundPair)
}
}
}
}
}
}
}
}
}*/
return foundPairs,nil
}
//A horizontal or vertical line of 9 cells through the entire Grid.
type line struct {
m_cells cellPtrSlice
m_rowAligned bool
m_rowOrColNum int
}
func(l *line) init(){
l.m_cells = make([]*cell,COL_LENGTH,COL_LENGTH)
}
func (l line) Solved() (bool,error) {
return l.m_cells.Solved()
}
func (l* line) reducePossible() (bool,error) {
known,err := l.m_cells.Known()
if err != nil {
return false,err
}
reduced, err := l.m_cells.TakeKnownFromPossible(known)
if err != nil{
return false,err
}
return reduced,nil
}
func (l* line) validate() bool{
known,err := l.m_cells.Known()
if err != nil {
return false
}
return validate(known)
}
func (l line) String() string{
str := ""
for _,c := range l.m_cells{
if c.IsKnown(){
v,err:= c.Known()
if err == nil{
str+=strconv.Itoa(v)
}
}else{
str+="x"
}
}
return str
}
//Grid which represents the 3x3 collection of squares which represent the entire puzzle
const ROW_LENGTH = 9
const COL_LENGTH = 9
const NUM_SQUARES = COL_LENGTH
const SQUARE_SIZE = 3
type Grid struct {
m_squares []square
m_rows []line
m_cols []line
m_sets []Solver
m_cells [][]cell
}
func New(puzzle [COL_LENGTH][ROW_LENGTH]int) (*Grid, error){
var g Grid
g.Init();
g.Fill(puzzle)
return &g,nil
}
func (g *Grid) validate() bool{
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].validate(){
return false
}
}
}
for i,_ := range g.m_sets{
if !g.m_sets[i].validate(){
return false
}
}
return true
}
func (g *Grid) Init() {
//Init the raw cells themselves that actually store the grid data
g.m_cells = make([][]cell,COL_LENGTH)
for i :=0; i< len(g.m_cells); i++{
g.m_cells[i] = make([]cell, ROW_LENGTH)
for j :=0; j< len(g.m_cells[i]); j++{
c := &g.m_cells[i][j]
c.init(i,j)
}
}
//Init each of the grouping structures that view portions of the grid
/*
	Squares are indexed into the grid as follows
S0 S1 S2
S3 S4 S5
S6 S7 S8
*/
g.m_squares = make([]square,NUM_SQUARES)
for squareIdx :=0; squareIdx<NUM_SQUARES; squareIdx++{
g.m_squares[squareIdx].init()
for x :=0; x<SQUARE_SIZE; x++{
for y:= 0; y<SQUARE_SIZE; y++{
//is this correct?
gridX := SQUARE_SIZE * (squareIdx % SQUARE_SIZE) + x
gridY := SQUARE_SIZE * (squareIdx / SQUARE_SIZE) + y
cellPtr := &g.m_cells[gridX][gridY]
g.m_squares[squareIdx].m_cells[x][y] = cellPtr
}
}
}
g.m_rows = make([]line, ROW_LENGTH)
g.m_cols = make([]line,COL_LENGTH)
//Make m_sets just a big long list of all the cell grouping structures
//handy for doing iterations over all different ways of looking at the cells
g.m_sets = make([]Solver,len(g.m_squares) + len(g.m_rows) + len(g.m_cols))
var idx int
for i := 0; i<len(g.m_squares); i++{
s:= &g.m_squares[i]
g.m_sets[idx] = s
idx++
}
for i:=0; i<len(g.m_rows); i++{
r:=&g.m_rows[i]
g.m_sets[idx] = r
idx++
r.init()
for colNum:=0; colNum < COL_LENGTH; colNum++{
r.m_cells[colNum] = &g.m_cells[colNum][i]
}
r.m_rowAligned = true
r.m_rowOrColNum = i
}
for i:= 0; i<len(g.m_cols); i++{
c := &g.m_cols[i]
g.m_sets[idx] = c
idx++
c.init()
for rowNum:=0; rowNum < ROW_LENGTH; rowNum++{
c.m_cells[rowNum] = &g.m_cells[i][rowNum]
}
c.m_rowAligned = false
c.m_rowOrColNum = i
}
}
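// Fill re-initialises the grid and copies every value in 1..9 from the puzzle array into
// the matching cell. Note the index swap: the puzzle is addressed as puzzle[y][x] while
// the cells are addressed as m_cells[x][y], mirroring the transposed access used by
// Puzzle() elsewhere in this file.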
func (g *Grid) Fill(puzzle [COL_LENGTH][ROW_LENGTH]int){
g.Init()
for x:=0; x<COL_LENGTH; x++{
for y:=0; y<ROW_LENGTH; y++{
var puzzVal = puzzle[y][x]
if puzzVal >=1 && puzzVal<=9{
g.m_cells[x][y].SetKnownTo(puzzVal)
}
}
}
}
func (g Grid) Solved() (bool,error) {
for _,s := range g.m_sets{
solved,err := s.Solved()
if err != nil | {
fmt.Println("Error during Solved() check on grid: " + err.Error())
return false,err
} | conditional_block |
|
sudoku.go | }
func (c cell) String() string {
val,err := c.Known()
if(err != nil){
return "x"
}
return strconv.Itoa(val)
}
type cellPtrSlice []*cell
func (cells cellPtrSlice) Solved() (bool, error){
for _,c := range cells{
if !c.IsKnown(){
return false,nil
}
}
return true,nil
}
func (cells cellPtrSlice) Known() ([]int, error){
known := make([]int,0, len(cells))
for _,c := range cells{
if c.IsKnown(){
val,err := c.Known()
if err != nil{
return known,err
}
known = append(known,val)
}
}
return known,nil
}
func (cells cellPtrSlice) TakeKnownFromPossible(known []int) (bool,error){
changed := false
for _,c := range cells{
taken, err := c.TakeKnownFromPossible(known)
if err != nil{
return false,err
}
changed = changed || taken
}
return changed,nil
}
//Squares which represent one of each of the 9 squares in a Grid, each of which
//references a 3x3 collection of cells.
type square struct {
m_cells [][]*cell
}
func (s square) Solved() (bool,error) {
for _,r := range s.m_cells{
solved,err := cellPtrSlice(r).Solved()
if(err != nil){
return false,err
}
if !solved {
return false,nil
}
}
return true,nil
}
func (s *square) init() {
s.m_cells = make([][]*cell,SQUARE_SIZE)
for i,_ := range s.m_cells{
s.m_cells[i] = make(cellPtrSlice, SQUARE_SIZE)
}
}
func (s* square) KnownInSquare() ([]int,error){
known := make([]int,0,SQUARE_SIZE*SQUARE_SIZE)
for x,_ := range s.m_cells{
for y,_ := range s.m_cells[x]{
c := s.m_cells[x][y]
if c.IsKnown(){
val,err := c.Known()
if err != nil{
return known,err
}
known = append(known,val)
}
}
}
return known,nil
}
func (s* square) reducePossible() (bool,error) {
known,err := s.KnownInSquare()
reduced := false
if err != nil {
return false,err
}
for x,_ := range s.m_cells{
cells := s.m_cells[x]
changed, err := cellPtrSlice(cells).TakeKnownFromPossible(known)
if err != nil{
return false,err
}
reduced = reduced || changed
}
return reduced,nil
}
func (s* square) validate() bool{
known,err := s.KnownInSquare()
if err != nil {
return false
}
return validate(known)
}
type pair struct{
m_values [2]int
m_rowAligned bool
m_rowOrColNum int
}
func (s* square) AlignedCellOnlyPairs() ([]pair,error){
foundPairs := make([]pair,0,SQUARE_SIZE*SQUARE_SIZE)
/*
for x1,_ := range s.m_cells{
for y1,_ := range s.m_cells[x1]{
c1 := s.m_cells[x1][y1]
if len(c1.m_possible)==2{
for x2,_ := range s.m_cells{
for y2,_ := range s.m_cells[x2]{
c2 := s.m_cells[x2][y2]
if c1 != c2{
if len(c2.m_possible)==2{
if x2 == x1 || y2 == y1{
matchBoth := true
valIdx :=0
var foundPair pair
for k,v := range c2.m_possible{
if val,ok :=c1.m_possible[k]; !ok{
matchBoth = false
}else{
pair.m_values[valIdx] = k
valIdx++
}
}
if matchBoth{
foundPair.m_rowAligned = y2==y1
if foundPair.m_rowAligned{
foundPair.m_rowOrColNum = c1.m_y
}else{
foundPair.m_rowOrColNum = c1.m_x
}
foundPairs = append(foundPairs,foundPair)
}
}
}
}
}
}
}
}
}*/
return foundPairs,nil
}
//A horizontal or vertical line of 9 cells through the entire Grid.
type line struct {
m_cells cellPtrSlice
m_rowAligned bool
m_rowOrColNum int
}
func(l *line) init(){
l.m_cells = make([]*cell,COL_LENGTH,COL_LENGTH)
}
func (l line) Solved() (bool,error) {
return l.m_cells.Solved()
}
func (l* line) reducePossible() (bool,error) {
known,err := l.m_cells.Known()
if err != nil {
return false,err
}
reduced, err := l.m_cells.TakeKnownFromPossible(known)
if err != nil{
return false,err
}
return reduced,nil
}
func (l* line) validate() bool{
known,err := l.m_cells.Known()
if err != nil {
return false
}
return validate(known)
}
func (l line) String() string{
str := ""
for _,c := range l.m_cells{
if c.IsKnown(){
v,err:= c.Known()
if err == nil{
str+=strconv.Itoa(v)
}
}else{
str+="x"
}
}
return str
}
//Grid which represents the 3x3 collection of squares which represent the entire puzzle
const ROW_LENGTH = 9
const COL_LENGTH = 9
const NUM_SQUARES = COL_LENGTH
const SQUARE_SIZE = 3
type Grid struct {
m_squares []square
m_rows []line
m_cols []line
m_sets []Solver
m_cells [][]cell
}
func New(puzzle [COL_LENGTH][ROW_LENGTH]int) (*Grid, error){
var g Grid | g.Init();
g.Fill(puzzle)
return &g,nil
}
func (g *Grid) validate() bool{
for x,_ := range g.m_cells{
for y,_:= range g.m_cells[x]{
if !g.m_cells[x][y].validate(){
return false
}
}
}
for i,_ := range g.m_sets{
if !g.m_sets[i].validate(){
return false
}
}
return true
}
func (g *Grid) Init() {
//Init the raw cells themselves that actually store the grid data
g.m_cells = make([][]cell,COL_LENGTH)
for i :=0; i< len(g.m_cells); i++{
g.m_cells[i] = make([]cell, ROW_LENGTH)
for j :=0; j< len(g.m_cells[i]); j++{
c := &g.m_cells[i][j]
c.init(i,j)
}
}
//Init each of the grouping structures that view portions of the grid
/*
	Squares are indexed into the grid as follows
S0 S1 S2
S3 S4 S5
S6 S7 S8
*/
g.m_squares = make([]square,NUM_SQUARES)
for squareIdx :=0; squareIdx<NUM_SQUARES; squareIdx++{
g.m_squares[squareIdx].init()
for x :=0; x<SQUARE_SIZE; x++{
for y:= 0; y<SQUARE_SIZE; y++{
//is this correct?
gridX := SQUARE_SIZE * (squareIdx % SQUARE_SIZE) + x
gridY := SQUARE_SIZE * (squareIdx / SQUARE_SIZE) + y
cellPtr := &g.m_cells[gridX][gridY]
g.m_squares[squareIdx].m_cells[x][y] = cellPtr
}
}
}
g.m_rows = make([]line, ROW_LENGTH)
g.m_cols = make([]line,COL_LENGTH)
//Make m_sets just a big long list of all the cell grouping structures
//handy for doing iterations over all different ways of looking at the cells
g.m_sets = make([]Solver,len(g.m_squares) + len(g.m_rows) + len(g.m_cols))
var idx int
for i := 0; i<len(g.m_squares); i++{
s:= &g.m_squares[i]
g.m_sets[idx] = s
idx++
}
for i:=0; i<len(g.m_rows); i++{
r:=&g.m_rows[i]
g.m_sets[idx] = r
idx++
r.init()
for colNum:=0; colNum < COL_LENGTH; colNum++{
r.m_cells[colNum] = &g.m_cells[colNum][i]
}
r.m_rowAligned = true
r.m_rowOrColNum = i
}
for i:= 0; i | random_line_split |
|
MedicalRecordsQueryForm.js | .type == "long") {
it.type = "int";
}
var f;
if (it.id == "BRZD") {
f = this.createRemoteDicField(it);
} else if (it.xtype == "combination") {
it.index = i;
f = this.createCombinationField(it);
table.items.push(f);
continue;
} else {
f = this.createField(it)
}
f.labelSeparator = ":"
f.index = i;
f.anchor = it.anchor || "75%"
delete f.width
f.colspan = parseInt(it.colspan)
f.rowspan = parseInt(it.rowspan)
f.boxMaxWidth = 120
if (!this.fireEvent("addfield", f, it)) {
continue;
}
table.items.push(f)
}
var cfg = {
buttonAlign : 'center',
labelAlign : this.labelAlign || "left",
labelWidth : this.labelWidth || 80,
frame : true,
shadow : false,
border : false,
collapsible : false,
// autoWidth : true,
// autoHeight : true,
autoScroll : true,
floating : false
}
if (this.isCombined) {
cfg.frame = true
cfg.shadow = false
// cfg.width = this.width
cfg.height = this.height
} else {
// cfg.autoWidth = true
cfg.autoHeight = true
}
if (this.disAutoHeight) {
delete cfg.autoHeight;
}
this.initBars(cfg);
Ext.apply(table, cfg)
this.expansion(table);// add by yangl
this.form = new Ext.FormPanel(table)
this.form.on("afterrender", this.onReady, this)
this.schema = schema;
this.setKeyReadOnly(true)
if (!this.isCombined) {
this.addPanelToWin();
}
var MBBH = this.form.getForm().findField("MBBH");
MBBH.on("lookup", this.onQueryClick, this);
return this.form
},
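			// createRemoteDicField builds the BRZD (diagnosis) field as a remote-search ComboBox:
			// it queries this.remoteUrl + '.search?ZXLB=1' with paging, renders hits through an
			// XTemplate, and routes selection through setBackInfo(). Summary of the code below;
			// the concrete search service and its parameters are defined elsewhere.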
createRemoteDicField : function(it) {
var mds_reader = this.getRemoteDicReader();
				// store that loads the lookup options from the remote search url
var mdsstore = new Ext.data.Store({
url : ClassLoader.appRootOffsetPath
+ this.remoteUrl + '.search?ZXLB=1',
reader : mds_reader
});
this.remoteDicStore = mdsstore;
Ext.apply(this.remoteDicStore.baseParams, this.queryParams);
var resultTpl = new Ext.XTemplate(
'<tpl for=".">',
'<div class="search-item">',
'<table cellpadding="0" cellspacing="0" border="0" class="search-item-table">',
'<tr>' + this.remoteTpl + '<tr>', '</table>', '</div>',
'</tpl>');
var _ctx = this;
var remoteField = new Ext.form.ComboBox({
// id : "YPMC",
name : it.id,
index : it.index,
fieldLabel : it.alias,
enableKeyEvents : it.enableKeyEvents,
listWidth : 270,
store : mdsstore,
selectOnFocus : true,
typeAhead : false,
loadingText : '搜索中...',
pageSize : 10,
hideTrigger : true,
minListWidth : this.minListWidth || 280,
tpl : resultTpl,
minChars : 1,
lazyInit : false,
boxMaxWidth : 120,
itemSelector : 'div.search-item',
onSelect : function(record) { // override default
// onSelect
// to do
this.bySelect = true;
_ctx.setBackInfo(this, record);
}
});
remoteField.on("focus", function() {
remoteField.innerList.setStyle('overflow-y',
'hidden');
}, this);
remoteField.on("keyup", function(obj, e) {// ʵ�����ּ�
var key = e.getKey();
if ((key >= 48 && key <= 57)
|| (key >= 96 && key <= 105)) {
var searchTypeValue = _ctx.cookie
.getCookie(_ctx.mainApp.uid
+ "_searchType");
if (searchTypeValue != 'BHDM') {
if (obj.isExpanded()) {
if (key == 48 || key == 96)
key = key + 10;
key = key < 59 ? key - 49 : key - 97;
var record = this.getStore().getAt(key);
obj.bySelect = true;
_ctx.setBackInfo(obj, record);
}
}
}
					// left/right arrow keys page through the result list
if (key == 37) {
obj.pageTb.movePrevious();
} else if (key == 39) {
obj.pageTb.moveNext();
}
					// backspace (key code 8): collapse the drop-down when the field is empty
if (key == 8) {
if (obj.getValue().trim().length == 0) {
if (obj.isExpanded()) {
obj.collapse();
}
}
}
})
remoteField.isSearchField = true;
remoteField.on("beforequery", function() {
return this.beforeSearchQuery();
}, this);
this.remoteDic = remoteField;
return remoteField
},
getRemoteDicReader : function() {
return new Ext.data.JsonReader({
root : 'disease',
totalProperty : 'count',
id : 'mdssearch_a'
}, [{
name : 'numKey'
}, {
name : 'JBXH'
}, {
name : 'MSZD'
}, {
name : 'JBBM'
}]);
},
beforeSearchQuery : function() {
return true;
},
setBackInfo : function(obj, record) {
// apply the selected record to the input field; subclasses override this for specific behavior
obj.collapse();
obj.setValue(record.get("MSZD"));
},
createCombinationField : function(it) {
var checkbox = new Ext.form.Checkbox({
xtype : "checkbox",
id : it.id + "BOXN",
name : it.id + "BOXN",
hideLabel : true
});
checkbox.on("check", this.onBoxCheck, this);
var label1 = new Ext.form.Label({
xtype : "label",
html : it.alias + ":",
width : 60
})
var dateField1 = new Ext.form.DateField({
xtype : "datefield",
id : it.id + "KSN",
name : it.id + "KSN",
emptyText : "请选择日期",
boxMinWidth : 80,
format : 'Y-m-d'
});
dateField1.disable();
var label2 = new Ext.form.Label({
xtype : "label",
text : "->",
width : 20
})
var dateField2 = new Ext.form.DateField({
xtype : "datefield",
id : it.id + "JSN",
name : it.id + "JSN",
emptyText : "请选择日期",
boxMinWidth : 80,
format : 'Y-m-d'
});
dateField2.disable();
var field = new Ext.form.CompositeField({
xtype : 'compositefield',
name : it.id,
anchor : '-20',
hideLabel : true,
index : it.index,
boxMinWidth : 330,
items : [checkbox, label1, dateField1, label2,
dateField2]
});
return field;
},
onBoxCheck : function(box, flag) {
var form = this.form.getForm();
if (box.id == "XTSJBOXN") {
var XTSJKSN = form.findField("XTSJKSN");
var XTSJJSN = form.findField("XTSJJSN");
if (flag) {
XTSJKSN.setValue();
XTSJKSN.enable();
XTSJJSN.setValue();
XTSJJSN.enable();
} else {
XTSJKSN.setValue();
XTSJKSN.disable();
XTSJJSN.setValue();
XTSJJSN.disable();
}
}
if (box.id == "JLSJBOXN") {
var JLSJKSN = form.findField("JLSJKSN");
var JLSJJSN = form.findField("JLSJJSN");
if (flag) {
JLSJKSN.setValue();
JLSJKSN.enable();
JLSJJSN.setValue();
JLSJJSN.enable();
} else {
| JLSJKSN.setValue();
JLSJKSN.disable();
JLSJJSN.setValue();
JLSJJSN.disable();
}
| conditional_block |
|
MedicalRecordsQueryForm.js | }
break;
case "date" :
v = v.format("Y-m-d");
cnds[1] = [
'$',
"str(" + value
+ ",'yyyy-MM-dd')"];
cnds.push(['s', v]);
break;
}
}
cnd = ['and', cnd, cnds];
}
}
}
cnd = this.addCndByCombination(cnd, form);
this.fireEvent("select", cnd);
this.focusFieldAfter(-1, 800)
},
addCndByCombination : function(cnd, form) {
var XTSJBOXN = form.findField("XTSJBOXN");
if (XTSJBOXN.getValue()) {
var XTSJKSN = form.findField("XTSJKSN");
var XTSJJSN = form.findField("XTSJJSN");
var kssj = XTSJKSN.getValue();
var jssj = XTSJJSN.getValue();
var c = this.getCnd(kssj, jssj, "XTSJ");
cnd = ['and', cnd, c];
}
var JLSJBOXN = form.findField("JLSJBOXN");
if (JLSJBOXN.getValue()) {
var JLSJKSN = form.findField("JLSJKSN");
var JLSJJSN = form.findField("JLSJJSN");
var kssj = JLSJKSN.getValue();
var jssj = JLSJJSN.getValue();
var c = this.getCnd(kssj, jssj, "JLSJ");
cnd = ['and', cnd, c];
}
var WCSJBOXN = form.findField("WCSJBOXN");
if (WCSJBOXN.getValue()) {
var WCSJKSN = form.findField("WCSJKSN");
var WCSJJSN = form.findField("WCSJJSN");
var kssj = WCSJKSN.getValue();
var jssj = WCSJJSN.getValue();
var c = this.getCnd(kssj, jssj, "WCSJ");
cnd = ['and', cnd, c];
}
return cnd;
},
getCnd : function(kssj, jssj, fieldName) {
if (kssj) {
kssj = kssj.format("Y-m-d");
kssj = kssj + " 00:00:00";
} else {
kssj = new Date();
kssj = kssj.format("Y-m-d") + " 00:00:00";
}
if (jssj) {
jssj = jssj.format("Y-m-d");
jssj = jssj + " 23:59:59";
} else {
jssj = new Date();
jssj = jssj.format("Y-m-d") + " 23:59:59";
}
var cnd = [
'and',
['ge', ['$', fieldName],
['todate', ['s',kssj], ['s','yyyy-mm-dd hh24:mi:ss']]],
['le', ['$', fieldName],
['todate', ['s',jssj], ['s','yyyy-mm-dd hh24:mi:ss']]]];
return cnd;
},
getComFieldId : function() {
var fields = ["XTSJBOXN", "XTSJKSN", "XTSJJSN", "JLSJBOXN",
"JLSJKSN", "JLSJJSN", "WCSJBOXN", "WCSJKSN", "WCSJJSN"];
return fields
},
doReset : function() {
this.doNew();
},
afterDoNew : function() {
var form = this.form.getForm();
var MBBH = form.findField("MBBH");
MBBH.setValue();
this.MBBH = null;
var fs = this.getComFieldId();
for (var i = 0; i < fs.length; i++) {
var fd = fs[i];
var f = form.findField(fd);
if (fd.indexOf("BOX") > 0) {
f.setValue();
f.enable();
} else {
f.setValue();
f.disable();
}
}
},
initPanel : function(sc) {
if (this.form) {
if (!this.isCombined) {
this.addPanelToWin();
}
return this.form;
}
var schema = sc
if (!schema) {
var re = util.schema.loadSync(this.entryName)
if (re.code == 200) {
schema = re.schema;
} else {
this.processReturnMsg(re.code, re.msg, this.initPanel)
return;
}
}
var ac = util.Accredit;
var defaultWidth = this.fldDefaultWidth || 200
var items = schema.items;
if (!this.fireEvent("changeDic", items)) {
return
}
var colCount = this.colCount;
var table = {
layout : 'tableform',
layoutConfig : {
columns : colCount,
tableAttrs : {
border : 0,
cellpadding : '2',
cellspacing : "2"
}
},
items : []
}
if (!this.autoFieldWidth) {
var forceViewWidth = (defaultWidth + (this.labelWidth || 80))
* (colCount - 1) + 350;
table.layoutConfig.forceWidth = forceViewWidth
}
var size = items.length
for (var i = 0; i < size; i++) {
var it = items[i]
if ((it.display == 0 || it.display == 1)
|| !ac.canRead(it.acValue)) {
continue;
}
if (it.type == "long") {
it.type = "int";
}
var f;
if (it.id == "BRZD") {
f = this.createRemoteDicField(it);
} else if (it.xtype == "combination") {
it.index = i;
f = this.createCombinationField(it);
table.items.push(f);
continue;
} else {
f = this.createField(it)
}
f.labelSeparator = ":"
f.index = i;
f.anchor = it.anchor || "75%"
delete f.width
f.colspan = parseInt(it.colspan)
f.rowspan = parseInt(it.rowspan)
f.boxMaxWidth = 120
if (!this.fireEvent("addfield", f, it)) {
continue;
}
table.items.push(f)
}
var cfg = {
buttonAlign : 'center',
labelAlign : this.labelAlign || "left",
labelWidth : this.labelWidth || 80,
frame : true,
shadow : false,
border : false,
collapsible : false,
// autoWidth : true,
// autoHeight : true,
autoScroll : true,
floating : false
}
if (this.isCombined) {
cfg.frame = true
cfg.shadow = false
// cfg.width = this.width
cfg.height = this.height
} else {
// cfg.autoWidth = true
cfg.autoHeight = true
}
if (this.disAutoHeight) {
delete cfg.autoHeight;
}
this.initBars(cfg);
Ext.apply(table, cfg)
this.expansion(table);// add by yangl
this.form = new Ext.FormPanel(table)
this.form.on("afterrender", this.onReady, this)
this.schema = schema;
this.setKeyReadOnly(true)
if (!this.isCombined) {
this.addPanelToWin();
}
var MBBH = this.form.getForm().findField("MBBH");
MBBH.on("lookup", this.onQueryClick, this);
return this.form
},
createRemoteDicField : function(it) {
var mds_reader = this.getRemoteDicReader();
// remote dictionary store URL
var mdsstore = new Ext.data.Store({
url : ClassLoader.appRootOffsetPath
+ this.remoteUrl + '.search?ZXLB=1',
reader : mds_reader
});
this.remoteDicStore = mdsstore;
Ext.apply(this.remoteDicStore.baseParams, this.queryParams);
var resultTpl = new Ext.XTemplate(
'<tpl for=".">',
'<div class="search-item">',
'<table cellpadding="0" cellspacing="0" border="0" class="search-item-table | cnds[0] = 'like';
if (it.id == "BRZD") {
cnds.push(['s', '%'+v + '%']);
} else {
cnds.push(['s', v + '%']); | random_line_split |
|
ParamEffectsProcessedFCD.py | 250, 300]
# [0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0, 20., 50.] # how many taxis in percent of the total vehicles | single element or a whole list
quota = [0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0, 20., 50.]
iteration = 2
vehId = 0
vehIdDict = {}
edgeDumpDict = None
vtypeDict = None
vehList = None
vehSum = None
procFcdDict = None
def main():
global edgeDumpDict, vtypeDict, vehList, vehSum, period, quota, procFcdDict
print("start program")
edgeDumpDict = make(path.FQedgeDumpPickle, path.FQedgeDump, readEdgeDump)
vtypeDict = make(path.FQvtypePickle, path.FQvtype, readVtype)
vehList = make(
path.FQvehPickle, path.FQvtypePickle, getVehicleList, False, vtypeDict)
vehSum = len(vehList)
if mode == U_FCD:
print("load source: ", os.path.basename(
path.FQprocessedFCD), "...", end=' ')
procFcdDict = readProcessedFCD()
print("Done!")
orgPath = path.FQoutput
if mode == W_FCD:
orgPath = path.FQrawFCD
orgPeriod = period
orgQuota = quota
for i in range(iteration):
print("iteration: ", i)
period = orgPeriod
quota = orgQuota
path.FQoutput = orgPath + \
"interval900s_iteration" + str(i) + ".out.xml"
path.FQrawFCD = orgPath + \
"interval900s_iteration" + str(i) + ".out.dat"
if mode == W_FCD:
writeRawFCD()
else:
createOutput()
print("end")
def generatePeriodQuotaSets(stopByPeriod=False):
| vtypeDictR = procFcdDict[(period, quota)]
else:
vtypeDictR = reduceVtype(taxis)
del taxis
yield(period, quota, vtypeDictR, taxiSum)
def readEdgeDump():
"""Get for each interval all edges with corresponding speed."""
edgeDumpDict = {}
begin = False
interval = 0
inputFile = open(path.FQedgeDump, 'r')
for line in inputFile:
words = line.split('"')
if not begin and words[0].find("<end>") != -1:
words = words[0].split(">")
interval = int(words[1][:-5])
edgeDumpDict.setdefault(interval, [])
elif words[0].find("<interval") != -1 and int(words[1]) >= simStartTime:
interval = int(words[1])
begin = True
if begin and words[0].find("<edge id") != -1:
edge = words[1]
if edge[0] != ':':
speed = float(words[13])
entered = int(words[15])
# if no vehicle drove off the edge, ignore the edge
if entered == 0:
continue
edgeDumpDict.setdefault(interval, []).append((edge, speed))
inputFile.close()
return edgeDumpDict
def readVtype():
"""Gets all necessary information of all vehicles."""
vtypeDict = {}
timestep = 0
begin = False
inputFile = open(path.FQvtype, 'r')
for line in inputFile:
words = line.split('"')
if words[0].find("<timestep ") != -1 and int(words[1]) >= simStartTime:
timestep = int(words[1])
begin = True
if begin and words[0].find("<vehicle id=") != -1:
if words[3][0] != ':': # except inner edges
edge = words[3][:-2]
# del / Part of edge
if edge.find("/") != -1:
edge = edge.split("/")[0]
# time id edge speed
# x y
vtypeDict.setdefault(timestep, []).append(
(words[1], edge, float(words[15]), words[13], words[11]))
inputFile.close()
return vtypeDict
def readProcessedFCD():
"""Reads the processed FCD and creates a List of vtypeDict fakes with can be used similarly."""
procFcdDict = {}
pqDateDict = {}  # each date is assigned a period / quota tuple
simDate = '2007-07-18 '
day = 0
# create keys for the procFcdDict
for p in period:
for q in quota:
day += 86400
date, time = calcTime.getDateFromDepart(day).split(" ")
pqDateDict.setdefault(date, (p, q))
procFcdDict.setdefault((p, q), {})
# print date,p,q
inputFile = open(path.FQprocessedFCD, 'r')
for line in inputFile:
timestamp, edge, speed, cover, id = line.split('\t')
date, time = calcTime.getNiceTimeLabel(timestamp).split(" ")
# add values to actual Dict
timestep = calcTime.getTimeInSecs(simDate + time)
procFcdDict[pqDateDict[date]].setdefault(
timestep, []).append((id, edge, float(speed) / 3.6))
inputFile.close()
return procFcdDict
def getVehicleList(vtypeDict):
"""Collects all vehicles used in the simulation."""
vehSet = set()
for timestepList in vtypeDict.values():
for elm in timestepList:
vehSet.add(elm[0])
return list(vehSet)
def make(source, dependentOn, builder, buildNew=False, *builderParams):
"""Fills the target (a variable) with Information of source (pickelt var).
It Checks if the pickle file is up to date in comparison to the dependentOn file.
If not the builder function is called.
If buildNew is True the builder function is called anyway.
"""
# check if pickle file exists
if not os.path.exists(source):
buildNew = True
# check date
# if source is newer
if not buildNew and os.path.getmtime(source) > os.path.getmtime(dependentOn):
print("load source: ", os.path.basename(source), "...", end=' ')
target = load(open(source, 'rb'))
else:
print("build source: ", os.path.basename(source), "...", end=' ')
target = builder(*builderParams)
# pickle the target
dump(target, open(source, 'wb'), 1)
print("Done!")
return target
def chooseTaxis(vehList):
""" Chooses from the vehicle list random vehicles with should act as taxis."""
# calc absolute amount of taxis
taxiNo = int(round(quota * len(vehList) / 100))
random.shuffle(vehList)
return vehList[:taxiNo]
def reduceVtype(taxis):
"""Reduces the vtypeDict to the relevant information."""
taxis.sort() # sort it for binary search
newVtypeDict = {}
for timestep in vtypeDict:
# timesteps which are a multiple of the period
if timestep % period == 0:
newVtypeDict[timestep] = (
[tup for tup in vtypeDict[timestep] if BinarySearch.isElmInList(taxis, tup[0])])
return newVtypeDict
def writeRawFCD():
"""Creates a file in the raw-fcd-format of the chosen taxis"""
global vehId, vehIdDict
vehIdDict = {}
vehId = 0
day = 0
def getVehId(orgId):
"""creates new vehicle id's which consists only numerics"""
global vehId, vehIdDict
value = vehIdDict.get(orgId, vehId)
if value is vehId:
vehIdDict[orgId] = vehId
| global period, quota
"""Generates all period-quota-sets (with creation of new Taxis for each set).
You can iterate over that generator and get the period and quota for each step.
If stopByPeriod=True it stops not only in the quota block but also in the period block -> have a look at the code.
"""
if type(period) != list:
period = [period]
if type(quota) != list:
quota = [quota]
pList = period
qList = quota
for period in pList:
if stopByPeriod:
yield (period, None, None, None)
for quota in qList:
print("create output for: period ", period, " quota ", quota)
taxis = chooseTaxis(vehList)
taxiSum = len(taxis)
if mode == U_FCD: | identifier_body |
ParamEffectsProcessedFCD.py | 250, 300]
# [0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0, 20., 50.] # how many taxis in percent of the total vehicles | single element or a whole list
quota = [0.1, 0.2, 0.5, 1.0, 2.0, 5.0, 10.0, 20., 50.]
iteration = 2
vehId = 0
vehIdDict = {}
edgeDumpDict = None
vtypeDict = None
vehList = None
vehSum = None
procFcdDict = None
def main():
global edgeDumpDict, vtypeDict, vehList, vehSum, period, quota, procFcdDict
print("start program")
edgeDumpDict = make(path.FQedgeDumpPickle, path.FQedgeDump, readEdgeDump)
vtypeDict = make(path.FQvtypePickle, path.FQvtype, readVtype)
vehList = make(
path.FQvehPickle, path.FQvtypePickle, getVehicleList, False, vtypeDict)
vehSum = len(vehList)
if mode == U_FCD:
print("load source: ", os.path.basename(
path.FQprocessedFCD), "...", end=' ')
procFcdDict = readProcessedFCD()
print("Done!")
orgPath = path.FQoutput
if mode == W_FCD:
orgPath = path.FQrawFCD
orgPeriod = period
orgQuota = quota
for i in range(iteration):
print("iteration: ", i)
period = orgPeriod
quota = orgQuota
path.FQoutput = orgPath + \
"interval900s_iteration" + str(i) + ".out.xml"
path.FQrawFCD = orgPath + \
"interval900s_iteration" + str(i) + ".out.dat"
if mode == W_FCD:
writeRawFCD()
else:
createOutput()
print("end")
def generatePeriodQuotaSets(stopByPeriod=False):
global period, quota
"""Generates all period-quota-sets (with creation of new Taxis for each set).
You can iterate over that generator and get the period and quota for each step.
If stopByPeriod=True it stops not only in the quota block but also in the period block -> have a look at the code.
"""
if type(period) != list:
period = [period]
if type(quota) != list:
quota = [quota]
pList = period
qList = quota
for period in pList:
if stopByPeriod:
yield (period, None, None, None)
for quota in qList:
print("create output for: period ", period, " quota ", quota)
taxis = chooseTaxis(vehList)
taxiSum = len(taxis)
if mode == U_FCD:
vtypeDictR = procFcdDict[(period, quota)]
else:
vtypeDictR = reduceVtype(taxis)
del taxis
yield(period, quota, vtypeDictR, taxiSum)
def readEdgeDump():
"""Get for each interval all edges with corresponding speed."""
edgeDumpDict = {}
begin = False
interval = 0
inputFile = open(path.FQedgeDump, 'r')
for line in inputFile:
words = line.split('"')
if not begin and words[0].find("<end>") != -1:
words = words[0].split(">")
interval = int(words[1][:-5])
edgeDumpDict.setdefault(interval, [])
elif words[0].find("<interval") != -1 and int(words[1]) >= simStartTime:
interval = int(words[1])
begin = True
if begin and words[0].find("<edge id") != -1:
edge = words[1]
if edge[0] != ':':
speed = float(words[13])
entered = int(words[15])
# if no vehicle drove off the edge, ignore the edge
if entered == 0:
continue
edgeDumpDict.setdefault(interval, []).append((edge, speed))
inputFile.close()
return edgeDumpDict
def readVtype():
"""Gets all necessary information of all vehicles."""
vtypeDict = {}
timestep = 0
begin = False
inputFile = open(path.FQvtype, 'r')
for line in inputFile:
words = line.split('"')
if words[0].find("<timestep ") != -1 and int(words[1]) >= simStartTime:
timestep = int(words[1])
begin = True
if begin and words[0].find("<vehicle id=") != -1:
if words[3][0] != ':': # except inner edges
edge = words[3][:-2]
# del / Part of edge
if edge.find("/") != -1:
edge = edge.split("/")[0]
# time id edge speed
# x y
vtypeDict.setdefault(timestep, []).append(
(words[1], edge, float(words[15]), words[13], words[11]))
inputFile.close()
return vtypeDict
def readProcessedFCD():
"""Reads the processed FCD and creates a List of vtypeDict fakes with can be used similarly."""
procFcdDict = {}
pqDateDict = {}  # each date is assigned a period / quota tuple
simDate = '2007-07-18 '
day = 0
# create keys for the procFcdDict
for p in period:
for q in quota:
day += 86400
date, time = calcTime.getDateFromDepart(day).split(" ")
pqDateDict.setdefault(date, (p, q))
procFcdDict.setdefault((p, q), {})
# print date,p,q
inputFile = open(path.FQprocessedFCD, 'r')
for line in inputFile:
timestamp, edge, speed, cover, id = line.split('\t')
date, time = calcTime.getNiceTimeLabel(timestamp).split(" ")
# add values to actual Dict
timestep = calcTime.getTimeInSecs(simDate + time)
procFcdDict[pqDateDict[date]].setdefault(
timestep, []).append((id, edge, float(speed) / 3.6))
inputFile.close()
return procFcdDict
def | (vtypeDict):
"""Collects all vehicles used in the simulation."""
vehSet = set()
for timestepList in vtypeDict.values():
for elm in timestepList:
vehSet.add(elm[0])
return list(vehSet)
def make(source, dependentOn, builder, buildNew=False, *builderParams):
"""Fills the target (a variable) with Information of source (pickelt var).
It Checks if the pickle file is up to date in comparison to the dependentOn file.
If not the builder function is called.
If buildNew is True the builder function is called anyway.
"""
# check if pickle file exists
if not os.path.exists(source):
buildNew = True
# check date
# if source is newer
if not buildNew and os.path.getmtime(source) > os.path.getmtime(dependentOn):
print("load source: ", os.path.basename(source), "...", end=' ')
target = load(open(source, 'rb'))
else:
print("build source: ", os.path.basename(source), "...", end=' ')
target = builder(*builderParams)
# pickle the target
dump(target, open(source, 'wb'), 1)
print("Done!")
return target
def chooseTaxis(vehList):
""" Chooses from the vehicle list random vehicles with should act as taxis."""
# calc absolute amount of taxis
taxiNo = int(round(quota * len(vehList) / 100))
random.shuffle(vehList)
return vehList[:taxiNo]
def reduceVtype(taxis):
"""Reduces the vtypeDict to the relevant information."""
taxis.sort() # sort it for binary search
newVtypeDict = {}
for timestep in vtypeDict:
# timesteps which are a multiple of the period
if timestep % period == 0:
newVtypeDict[timestep] = (
[tup for tup in vtypeDict[timestep] if BinarySearch.isElmInList(taxis, tup[0])])
return newVtypeDict
def writeRawFCD():
"""Creates a file in the raw-fcd-format of the chosen taxis"""
global vehId, vehIdDict
vehIdDict = {}
vehId = 0
day = 0
def getVehId(orgId):
"""creates new vehicle id's which consists only numerics"""
global vehId, vehIdDict
value = vehIdDict.get(orgId, vehId)
if value is vehId:
vehIdDict[orgId] = vehId | getVehicleList | identifier_name |