text
stringlengths 5
22M
| id
stringlengths 12
177
| metadata
dict | __index_level_0__
int64 0
1.37k
|
---|---|---|---|
# %%
import logging
import sys
sys.path.append("..")
from utils import *
from tqdm import tqdm
import argparse
from transformers import BertTokenizer
from typing import Dict, List, Tuple
from collections import defaultdict
import json
import os
# Module-level configuration shared by all preprocessing helpers below.
bert_version = 'bert-large-uncased-whole-word-masking'
# Tokenizer used to sub-tokenize questions, table names and column names.
tokenizer: BertTokenizer = BertTokenizer.from_pretrained(bert_version)
print('load Bert tokenizer over, vocab size = {}'.format(len(tokenizer)))
# Global counters updated as a side effect during preprocessing.
statistics = defaultdict(int)
# Collapse Spider's column types onto the four types the model distinguishes.
spider_type_mappings = {
    'text': 'text',
    'time': 'time',
    'number': 'number',
    'boolean': 'boolean',
    'others': 'text'
}
proj_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# data_dir = os.path.join(proj_dir, 'data', 'slsql')
# load schemas from database
def get_column_names_unique(column_names: List[Tuple[int, str]], table_names: List[str], primary_keys: List[int]) -> List[str]:
    """Disambiguate duplicated column names by prefixing the table name.

    A column keeps its bare name when it belongs to the special '*' table
    (table index -1), is globally unique across all tables, or is a primary
    key; otherwise it becomes '<table> . <column>'.
    """
    # Count how many times each bare column name occurs across the schema.
    occurrences = defaultdict(int)
    for _, name in column_names:
        occurrences[name] += 1

    unique_names = []
    for col_idx, (tbl_idx, name) in enumerate(column_names):
        if tbl_idx == -1 or occurrences[name] == 1 or col_idx in primary_keys:
            unique_names.append(name)
        else:
            unique_names.append('{} . {}'.format(table_names[tbl_idx], name))

    assert len(unique_names) == len(column_names)
    return unique_names
def alt_tbl_name(tbl_name):
    """Strip boilerplate words from a multi-word table name.

    Drops a leading 'reference' and trailing 'data' / 'list' words, but never
    reduces the name below one word; single-word names pass through unchanged.
    """
    words = tbl_name.split()
    if len(words) > 1 and words[0] == 'reference':
        words = words[1:]
    # Check suffixes in the same order as the original sequential checks.
    for suffix in ('data', 'list'):
        if len(words) > 1 and words[-1] == suffix:
            words = words[:-1]
    return ' '.join(words)
def remove_shared_prefix(col_name: str, tbl_name: str) -> str:
    """Drop the longest common leading word sequence of *tbl_name* from *col_name*."""
    col_tokens = col_name.split()
    shared = 0
    for col_tok, tbl_tok in zip(col_tokens, tbl_name.split()):
        if col_tok != tbl_tok:
            break
        shared += 1
    return " ".join(col_tokens[shared:])
def get_column_name_normalized(column_lem_names: List[Tuple[int, str]], table_lem_names: List[str], verbose: bool = False):
    """Normalize table names and strip each column's shared prefix with its table.

    Tables go through alt_tbl_name; every column (except '*') then has the
    leading words it shares with its normalized table name removed. When
    *verbose* is set, each rewritten column is logged.

    Returns:
        (column_norm_names, table_norm_names)
    """
    table_norm_names = [alt_tbl_name(name) for name in table_lem_names]

    column_norm_names = []
    for tbl_idx, col_name in column_lem_names:
        if col_name == '*':
            column_norm_names.append('*')
            continue
        norm_name = remove_shared_prefix(col_name, table_norm_names[tbl_idx])
        if verbose and norm_name != col_name:
            logging.info(" {}\t{}\t{}".format(
                table_norm_names[tbl_idx], col_name, norm_name))
        column_norm_names.append(norm_name)

    return column_norm_names, table_norm_names
def load_schema(obj: Dict) -> SpiderSchema:
    """Build a SpiderSchema from one raw database entry of the tables JSON."""
    # Map each column index to its table, and group column indices per table.
    column_to_table, table_to_columns = {}, {}
    for col_idx, (tbl_idx, _) in enumerate(obj['column_names']):
        table_to_columns.setdefault(tbl_idx, []).append(col_idx)
        column_to_table[col_idx] = tbl_idx

    col_norm_names, tbl_norm_names = get_column_name_normalized(
        obj['column_names_lemma'], obj['table_names_lemma'], True)

    return SpiderSchema(
        db_id=obj['db_id'],
        column_names=col_norm_names,
        column_types=obj['column_types'],
        column_names_lemma=[name for _, name in obj['column_names_lemma']],
        column_names_original=[name for _, name in obj['column_names_original']],
        table_names=tbl_norm_names,
        table_names_lemma=obj['table_names_lemma'],
        table_names_original=obj['table_names_original'],
        table_to_columns=table_to_columns,
        column_to_table=column_to_table,
        primary_keys=obj['primary_keys'],
        foreign_keys=obj['foreign_keys'])
def load_schemas(path: str):
    """Load all database schemas from *path*, keyed by db_id.

    Fix: the original passed `open(...)` straight to json.load and never
    closed the file handle; a `with` block releases it deterministically.
    """
    with open(path, 'r', encoding='utf-8') as fr:
        databases = json.load(fr)
    schemas = {}
    for database in databases:
        schema = load_schema(database)
        schemas[schema.db_id] = schema
    return schemas
def load_value_matches(path: str) -> Dict[str, ValueMatcher]:
    """Build one ValueMatcher per database from a JSON-lines table dump.

    Each input line describes one table; its columns are collected as
    ('table.column' lower-cased, data_type, values) triples per database.
    """
    db_columns = defaultdict(list)
    with open(path, 'r', encoding='utf-8') as fr:
        for line in fr:
            table = json.loads(line)
            tbl_name = table['table_name']
            for col in table['columns']:
                full_name = "{}.{}".format(tbl_name, col['column_name']).lower()
                db_columns[table['db_name']].append(
                    (full_name, col['data_type'], col['values']))
    return {db: ValueMatcher(columns) for db, columns in db_columns.items()}
def _is_in_column(idx, field: str, tokens: List[SQLToken]):
    """Heuristic: is the column at *idx* the target of an (NOT) IN sub-query?

    Inside a WHERE clause the IN keyword follows the column (possibly after
    NOT); inside the SELECT clause of a sub-query it precedes the column.
    """
    def _is_in_keyword(pos):
        token = tokens[pos]
        return token.token_type == SQLTokenType.keyword and token.value.lower() == 'in'

    clause = field.lower()
    if clause == 'where':
        if idx + 2 >= len(tokens):
            return False
        return _is_in_keyword(idx + 1) or _is_in_keyword(idx + 2)
    if clause == 'select':
        # e.g. SELECT ... WHERE x NOT IN ( SELECT col FROM ... )
        if idx - 2 >= 0 and _is_in_keyword(idx - 2):
            return True
        if idx - 3 >= 0 and _is_in_keyword(idx - 3):
            return True
    return False
def _is_group_by_key_column(idx, field: str, tokens: List[SQLToken], schema: SpiderSchema):
    """True iff the token at *idx* is a GROUP BY column that is a key column."""
    if field.lower() != 'group':
        return False
    token = tokens[idx]
    if not isinstance(token, ColumnToken):
        return False
    # A non-zero key code marks the column as a key in the schema.
    return schema.get_column_key_code(schema.id_map[token.column_name]) != 0
def generate_identify_labels_from_sql(sql: SQLExpression, schema: SpiderSchema):
    """Derive table/column/value identify labels by walking the SQL token stream.

    Columns are skipped when inside FROM, when '*', when the target of an
    (NOT) IN sub-query, or when a key column in GROUP BY. Value labels are
    stored as (value, candidate columns) pairs and are NOT de-duplicated.
    Side effect: updates the global `statistics['max_value_count']`.
    """
    identify_labels = defaultdict(list)
    is_from = False  # currently inside a FROM clause
    field = None     # last clause keyword seen (SELECT / WHERE / GROUP / ...)
    value2column_count = defaultdict(int)
    # from_tables = []
    for i, token in enumerate(sql.tokens):
        if isinstance(token, KeywordToken):
            if token.keyword.lower() in ['from']:
                is_from = True
                continue
            if token.keyword.lower() in CLAUSE_KEYWORDS:
                field = token.keyword
                is_from = False
        elif isinstance(token, ColumnToken):
            if not is_from and token.column_name != '*' and not _is_in_column(i, field, sql.tokens) and not _is_group_by_key_column(i, field, sql.tokens, schema):
                identify_labels[str(SQLTokenType.column)].append(token.column_name)
        elif isinstance(token, TableToken):
            # if not is_from:
            #     identify_labels[str(SQLTokenType.table)].append(token.table_name)
            # else:
            #     from_tables.append(token.table_name)
            identify_labels[str(SQLTokenType.table)].append(token.table_name)
        elif isinstance(token, ValueToken):
            if not is_from and field != 'LIMIT':
                if token.columns is None or len(token.columns) != 1:
                    # Ambiguous value: zero or several candidate columns.
                    # NOTE(review): a None columns value would crash the
                    # counting loop below -- presumably never None in practice.
                    print(sql.sql, token.value, token.columns)
                identify_labels[str(SQLTokenType.value)].append(
                    (token.value, token.columns))
        else:
            raise NotImplementedError()
    # Guarantee the table/column keys exist even when no label was collected.
    if str(SQLTokenType.table) not in identify_labels:
        identify_labels[str(SQLTokenType.table)] = []
    if str(SQLTokenType.column) not in identify_labels:
        identify_labels[str(SQLTokenType.column)] = []
    # Track the maximum number of values attached to any single column.
    for val, columns in identify_labels[str(SQLTokenType.value)]:
        for column in columns:
            value2column_count[column] += 1
    for count in value2column_count.values():
        statistics['max_value_count'] = max(
            statistics['max_value_count'], count)
    # De-duplicate all label lists except values, whose (value, columns)
    # pairs may contain unhashable lists.
    for key in identify_labels:
        if key != str(SQLTokenType.value):
            identify_labels[key] = list(set(identify_labels[key]))
    return identify_labels
def generate_identify_labels_from_align(ant: Dict, schema: SpiderSchema):
    """Collect table/column/value identify labels from token-level align annotations.

    Also records, under the key (entity_type, entity_id), which question-token
    indices were aligned to that entity. Every list is de-duplicated before
    returning.
    """
    identify_labels = defaultdict(list)
    for tok_idx, tok_ant in enumerate(ant):
        if tok_ant is None:
            continue
        e_type, e_idx = tok_ant['type'], tok_ant['id']
        assert e_type in ['tbl', 'col', 'val']
        if e_type == 'tbl':
            identify_labels[str(SQLTokenType.table)].append(
                schema.table_names_original[e_idx].lower())
        elif e_type == 'col':
            identify_labels[str(SQLTokenType.column)].append(
                schema.get_column_full_name(e_idx))
        else:  # 'val'
            identify_labels[str(SQLTokenType.value)].append(
                'val_{}'.format(schema.get_column_full_name(e_idx)))
        identify_labels[(e_type, e_idx)].append(tok_idx)
    for key in identify_labels:
        identify_labels[key] = list(set(identify_labels[key]))
    return identify_labels
def generate_masking_ngrams(question: Utterance, schema: SpiderSchema) -> List[Tuple[int, int, str]]:
    """Generate (start, end, text) spans of the question usable for masking.

    Every single token is a span; longer spans that match a schema column or
    table name (via an NGramMatcher cached per database in the global
    `ngram_matchers`) are added when not already covered by an earlier span.
    """
    # Lazily build and cache the n-gram matcher for this database.
    if schema.db_id not in ngram_matchers:
        column_tokens = []
        for i, column in enumerate(schema.column_names):
            column_tokens.append(
                (schema.get_column_full_name(i), column.split(' ')))
        for i, table in enumerate(schema.table_names):
            column_tokens.append(
                (schema.table_names_original[i], table.split(' ')))
        ngram_matchers[schema.db_id] = NGramMatcher(column_tokens)
    ngram_matcher = ngram_matchers[schema.db_id]
    masking_ngrams = []
    # Unigrams: every question token is its own masking span.
    for tok_idx in range(len(question.tokens)):
        masking_ngrams.append(
            (tok_idx, tok_idx, question.tokens[tok_idx].token))
    ngram_spans = set([])
    for q_i, q_j, _, _, _ in ngram_matcher.match([token.token for token in question.tokens]):
        ngram_spans.add((q_i, q_j))
    # Consider longer matched spans first; drop any span fully contained in
    # one already present (unigrams only absorb single-token spans).
    for q_i, q_j in sorted(list(ngram_spans), key=lambda x: x[1]-x[0], reverse=True):
        is_overlap = False
        for q_i2, q_j2, ngram in masking_ngrams:
            if q_i2 <= q_i and q_j2 >= q_j:
                is_overlap = True
                break
        if not is_overlap:
            ngram_ij = " ".join([x.token for x in question.tokens[q_i:q_j+1]])
            masking_ngrams.append((q_i, q_j, ngram_ij))
    return masking_ngrams
def resolve_values(question: Utterance, schema: SpiderSchema, sql: SQLExpression):
    """Match the SQL's value tokens against the question text.

    Returns (all_resolved, value_matches): all_resolved is True iff every
    (value, column) pair used in the SQL was found in the question; matches
    corresponding to a SQL value get their ``label`` flag set.

    Uses the module-level `value_matchers` cache (keyed by db_id).
    """
    value_matcher = value_matchers[schema.db_id]
    # Normalized (value, column) pairs appearing in the SQL -> resolved flag.
    values_dict = {}
    for token in sql.tokens:
        if isinstance(token, ValueToken) and len(token.columns) > 0:
            for column in token.columns:
                key = (str(token.value).strip("\"").strip('%').lower(),
                       column.lower())
                values_dict[key] = False
    value_matches = value_matcher.match(question.text_tokens, 0.8, 3)
    for value_match in value_matches:
        key = (str(value_match.value).lower(), value_match.column.lower())
        if key in values_dict:
            # BUG FIX: mark the existing normalized key as resolved. The old
            # code assigned to a non-lowered key, silently inserting a new
            # entry and leaving the original one False forever.
            values_dict[key] = True
            value_match.label = True
    all_resolved = True
    for (value, column), resolved in values_dict.items():
        if not resolved:
            all_resolved = False
            logging.info('Value resolved: {}/{}/{}\t{}'.format(
                value, schema.db_id, column, question.text))
    return all_resolved, value_matches
def fix_tok(tok):
    """Lower-case a token and normalize PTB bracket escapes and double quotes."""
    normalized = tok.lower()
    replacements = {'-lrb-': '(', '-rrb-': ')', '"': '\''}
    return replacements.get(normalized, normalized)
def process_squall_query(query: Dict):
    """Convert one raw SLSQL query entry into a model-ready JSON example.

    Relies on module-level globals: `tokenizer`, `schemas`, `sql_logs`,
    `statistics` and `spider_type_mappings`.
    """
    # Step 1: process question tokens (normalizing PTB escapes via fix_tok).
    question = query['question']
    assert len(query['toks']) == len(query['lemma'])
    question_utterance = generate_utterance(tokenizer, question, [fix_tok(
        x) for x in query['toks']], [fix_tok(x) for x in query['lemma']])
    # Step 2: process tables & columns into serialized utterances.
    assert query['db_id'] in schemas
    schema: SpiderSchema = schemas[query['db_id']]
    processed_tables = []
    for tbl_idx, col_indices in schema.table_to_columns.items():
        # special column *: serialized as a pseudo-table with no columns
        if tbl_idx == -1:
            table_json = {
                'index': -1,
                'utterance': Utterance('*', tokens=[]).to_json(),
                'columns': None
            }
            processed_tables += [table_json]
            continue
        tbl_name = schema.table_names[tbl_idx]
        table_utterance = generate_utterance(tokenizer, tbl_name)
        processed_columns = []
        for col_idx in col_indices:
            column_type = schema.column_types[col_idx]
            assert column_type in spider_type_mappings, column_type
            column_utterance = generate_utterance(
                tokenizer, schema.column_names[col_idx])
            column_json = {
                'index': col_idx,
                'utterance': column_utterance.to_json(),
                'data_type': spider_type_mappings[column_type]
            }
            processed_columns += [column_json]
        table_json = {
            'index': tbl_idx,
            'utterance': table_utterance.to_json(),
            'columns': processed_columns
        }
        processed_tables += [table_json]
    # Step 3: parse the gold SQL against the schema and log it for inspection.
    sql = parse_spider_sql(query['query'], schema)
    sql_logs.append(question)
    sql_logs.append(query['query'])
    sql_logs.append(sql.sql + '\n')
    # Step 4: match SQL values back to the question text.
    value_resolved, matched_values = resolve_values(
        question_utterance, schema, sql)
    if not value_resolved:
        statistics['value_unresolved'] += 1
    # Step 5: generate alignment / identify labels for the model.
    assert len(query['ant']) == len(question_utterance.tokens)
    identify_labels = generate_identify_labels_from_sql(sql, schema)
    if len(identify_labels[str(SQLTokenType.table)]) == 0:
        # Every SQL should reference at least one table; surface odd cases.
        print(question)
        print(sql.sql)
    masking_ngrams = generate_masking_ngrams(question_utterance, schema)
    processed_query = {
        'question': question_utterance.to_json(),
        'tables': processed_tables,
        'identify_labels': identify_labels,
        'align_labels': query['ant'],
        'sql': sql.to_json(),
        'schema': schema.to_json(),
        'masking_ngrams': masking_ngrams,
        'values': [v.to_json() for v in matched_values]
    }
    return processed_query
def _compare_identify_labels(example: Dict):
    """Diff the identify labels derived from SQL against those from alignments.

    Appends per-example agreement to the global `identify_labels_equal`
    counter and returns human-readable report lines for one example.
    """
    question: Utterance = Utterance.from_json(example['question'])
    schema: SpiderSchema = SpiderSchema.from_json(example['schema'])
    sql: SQLExpression = SQLExpression.from_json(example['sql'])
    from_sql = example['identify_labels']
    from_align = generate_identify_labels_from_align(
        example['align_labels'], schema)

    cmp_results = ["Q: {}\n".format(question.text),
                   "SQL: {}\n".format(sql.sql)]
    # (title, sql-derived set, align-derived set) for tables then columns.
    entity_sets = []
    for title, token_type in (('Table', SQLTokenType.table),
                              ('Column', SQLTokenType.column)):
        sql_set = set(from_sql[str(token_type)])
        align_set = set(from_align[str(token_type)])
        entity_sets.append((title, sql_set, align_set))
        cmp_results.append("{} Shared: {}\n".format(
            title, ' '.join(sorted(sql_set & align_set))))
        cmp_results.append("{} SQL: {}\n".format(
            title, ' '.join(sorted(sql_set - align_set))))
        cmp_results.append("{} Align: {}\n".format(
            title, ' '.join(sorted(align_set - sql_set))))
    for title, sql_set, align_set in entity_sets:
        cmp_results.append("{}: {}\n".format(title, sql_set == align_set))
    cmp_results.append('\n')
    for title, sql_set, align_set in entity_sets:
        identify_labels_equal[title.lower()] += int(sql_set == align_set)
    return cmp_results
def compare_identify_labels(examples, saved_path: str):
    """Write the per-example identify-label diff report to *saved_path*."""
    report_lines = []
    for example in examples:
        report_lines.extend(_compare_identify_labels(example))
    with open(saved_path, 'w', encoding='utf-8') as fw:
        fw.writelines(report_lines)
    print('Compare over!')
if __name__ == '__main__':
    # Preprocess the SLSQL dev/train splits: tokenize questions and schemas,
    # parse gold SQL, generate identify/align labels, then print statistics
    # about the serialized examples and value-match quality.
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, required=True)
    args = parser.parse_args()
    print(args)
    data_dir = args.data_dir
    if not os.path.exists(data_dir):
        print(f'{data_dir} does not exists. exit.')
        sys.exit(0)
    else:
        print(f'load data from {data_dir}')
    # Start every run with a fresh preprocess log.
    if os.path.exists(os.path.join(data_dir, 'preprocess.log')):
        os.remove(os.path.join(data_dir, 'preprocess.log'))
    logging.basicConfig(filename=os.path.join(
        data_dir, 'preprocess.log'), level=logging.DEBUG)
    # Load schemas, per-database value matchers, and the raw SLSQL splits.
    schemas = load_schemas(os.path.join(data_dir, 'processed_tables.json'))
    print('load schems over, size = {}'.format(len(schemas)))
    value_matchers = load_value_matches(
        os.path.join(data_dir, 'spider_tables.txt'))
    ngram_matchers: Dict[str, NGramMatcher] = {}
    sql_logs = []
    dev_queries = json.load(
        open(os.path.join(data_dir, 'slsql_dev.json'), 'r', encoding='utf-8'))
    train_queries = json.load(
        open(os.path.join(data_dir, 'slsql_train.json'), 'r', encoding='utf-8'))
    print('load SLSQL dev & train queries over, size = {}/{}'.format(len(dev_queries), len(train_queries)))
    # NOTE(review): leftover single-example smoke test; `out` is never used.
    out = process_squall_query(dev_queries[111])
    # Process and save the dev split.
    dev_processed = []
    statistics['value_unresolved'] = 0
    for query in tqdm(dev_queries):
        dev_processed += [process_squall_query(query)]
    save_json_objects(dev_processed, os.path.join(
        data_dir, 'dev.{}.json'.format(bert_version)))
    print('process dev over, value_unresolved: {}'.format(
        statistics['value_unresolved']))
    open(os.path.join(data_dir, 'dev.parsed_sqls.log'),
         'w', encoding='utf-8').write('\n'.join(sql_logs))
    print('save parsed sqls ...')
    # Measure agreement between SQL-derived and align-derived identify labels.
    identify_labels_equal = defaultdict(int)
    compare_identify_labels(dev_processed, os.path.join(
        data_dir, 'dev.identify_labels.diff.txt'))
    print('Identify labesl generated from SQL accuracy: table = {:.4f} ({}/{}), column = {:.4f} ({}/{})'.format(
        identify_labels_equal['table'] / len(dev_processed),
        identify_labels_equal['table'],
        len(dev_processed),
        identify_labels_equal['column'] / len(dev_processed),
        identify_labels_equal['column'],
        len(dev_processed),
    ))
    # Process and save the train split.
    train_processed = []
    statistics['value_unresolved'] = 0
    for query in tqdm(train_queries):
        train_processed += [process_squall_query(query)]
    save_json_objects(train_processed, os.path.join(
        data_dir, 'train.{}.json'.format(bert_version)))
    print('process train over, value_unresolved: {}'.format(
        statistics['value_unresolved']))
    # Token-length statistics over the serialized splits.
    # NOTE(review): `torch` is not imported in this file; presumably it comes
    # in via `from utils import *` -- confirm.
    dev_iter = load_spider_data_iterator(os.path.join(data_dir, 'dev.{}.json'.format(
        bert_version)), tokenizer, 16, torch.device('cpu'), False, False, 512)
    total_size, num_examples = 0, 0
    input_tokens = []
    for batch_input in dev_iter:
        bs, length = batch_input['input_token_ids'].size(
            0), batch_input['input_token_ids'].size(1)
        total_size += bs * length
        num_examples += bs
        for i in range(bs):
            input_tokens.append(
                " ".join(batch_input['input_tokens'][i]) + '\n')
    print(total_size, num_examples, total_size / num_examples)
    open(os.path.join(data_dir, 'dev.input_tokens.txt'),
         'w', encoding='utf-8').writelines(input_tokens)
    train_iter = load_spider_data_iterator(os.path.join(data_dir, 'train.{}.json'.format(
        bert_version)), tokenizer, 16, torch.device('cpu'), True, True, 512)
    total_size, num_examples = 0, 0
    for batch_input in train_iter:
        bs, length = batch_input['input_token_ids'].size(
            0), batch_input['input_token_ids'].size(1)
        total_size += bs * length
        num_examples += bs
        # print(batch_input['input_token_ids'].size())
    print(total_size, num_examples, total_size / num_examples)
    # Same statistics with a shorter max length (400) and no shuffling.
    train_iter2 = load_spider_data_iterator(os.path.join(data_dir, 'train.{}.json'.format(
        bert_version)), tokenizer, 16, torch.device('cpu'), False, True, 400)
    total_size, num_examples = 0, 0
    for batch_input in train_iter2:
        bs, length = batch_input['input_token_ids'].size(
            0), batch_input['input_token_ids'].size(1)
        total_size += bs * length
        num_examples += bs
        # print(batch_input['input_token_ids'].size())
    print(total_size, num_examples, total_size / num_examples)
    # Inspect value matches whose score falls in a band around the threshold.
    # NOTE(review): variable is named dev_examples but loads the TRAIN file.
    dev_examples = json.load(open(os.path.join(
        data_dir, 'train.{}.json'.format(bert_version)), 'r', encoding='utf-8'))
    threshold = 0.81
    count1, count2, count3 = 0, 0, 0
    for example in dev_examples:
        values: List[ValueMatch] = [
            ValueMatch.from_json(x) for x in example['values']]
        for value in values:
            if value.score < threshold and value.score > 0.5:
                if value.label and len(value.value) <= 4:
                    print(value)
                    count1 += 1
                if len(value.value) > 4:
                    count3 += 1
                    continue
                count2 += int(value.label)
    print(count1, count2, count3)
|
ContextualSP/awakening_latent_grounding/scripts/data_preprocess.grounding.py/0
|
{
"file_path": "ContextualSP/awakening_latent_grounding/scripts/data_preprocess.grounding.py",
"repo_id": "ContextualSP",
"token_count": 10319
}
| 224 |
#!/usr/bin/env bash
# Download the CANARD dataset release, extract it flat into the current
# directory, remove the archive, and run the shared preprocessing script.
wget https://obj.umiacs.umd.edu/elgohary/CANARD_Release.zip
unzip -j CANARD_Release.zip  # -j: junk paths (extract without sub-directories)
rm -rf CANARD_Release.zip
python ../../preprocess.py --dataset CANARD
|
ContextualSP/incomplete_utterance_rewriting/dataset/CANARD/download.sh/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/dataset/CANARD/download.sh",
"repo_id": "ContextualSP",
"token_count": 71
}
| 225 |
import argparse
import sys
from allennlp.commands import main
if __name__ == '__main__':
    # Wrap `allennlp evaluate` so a model/test file pair can be evaluated
    # with the project's custom packages pre-registered.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--model_file", required=True, type=str,
                            help="Please specify a model file to evaluate")
    arg_parser.add_argument("--test_file", required=True, type=str,
                            help="Please specify a test file to evaluate on")
    parsed_args = arg_parser.parse_args()
    model_file = parsed_args.model_file
    test_file = parsed_args.test_file
    result_file = model_file + ".json"
    # Re-invoke the allennlp CLI programmatically by faking argv.
    sys.argv = [
        "allennlp",
        "evaluate",
        "--output-file", result_file,
        # BUG FIX: argv entries must all be strings; the cuda device id was
        # passed as the integer 0, which breaks argparse inside allennlp.
        "--cuda-device", "0",
        "--include-package", "data_reader",
        "--include-package", "model",
        model_file,
        test_file,
    ]
    main()
|
ContextualSP/incomplete_utterance_rewriting/src/evaluate.py/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/src/evaluate.py",
"repo_id": "ContextualSP",
"token_count": 371
}
| 226 |
# coding: utf-8
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from src.utils.algo_utils import BipartiteGraphSolver
class HingeLoss(nn.Module):
    """Margin-based ranking loss over positive/negative alignment matrices,
    with optional L1 sparsity and entropy regularization terms."""

    def __init__(self, margin=0.6, aggregation='max', l1_norm_weight=0, entropy_norm_weight=0):
        super(HingeLoss, self).__init__()
        self.margin = margin
        self.aggregation = aggregation  # one of 'max' | 'sum' | 'match'
        self.l1_norm_weight = l1_norm_weight
        self.entropy_norm_weight = entropy_norm_weight
        self.bipartite_graph_solver = BipartiteGraphSolver()

    def forward(self, pos_align, neg_align, lengths):
        """Compute the hinge loss between positive and negative alignments.

        Args:
            pos_align: alignment scores for positive pairs, indexed as
                (batch, src position, tgt position) -- TODO confirm shapes.
            neg_align: same for negative pairs.
            lengths: (positive_lengths, negative_lengths); each is permuted so
                that row 0 holds source lengths and row 1 target lengths.
        """
        positive_lengths, negative_lengths = lengths
        positive_lengths = positive_lengths.permute(1, 0)
        negative_lengths = negative_lengths.permute(1, 0)
        src_lengths = positive_lengths[0]
        pos_tgt_lengths = positive_lengths[1]
        neg_tgt_lengths = negative_lengths[1]
        # Total number of alignment cells, used to scale the L1 norms.
        positive_n = sum(positive_lengths[0] * positive_lengths[1])
        # BUG FIX: was `negative_lengths[1] * negative_lengths[1]` (target
        # length squared); the cell count is src_len * tgt_len, mirroring
        # positive_n above.
        negative_n = sum(negative_lengths[0] * negative_lengths[1])
        pos_l1_norm = torch.norm(pos_align, p=1) / positive_n
        neg_l1_norm = torch.norm(neg_align, p=1) / negative_n
        # Entropy regularizer over rows and columns of both matrices.
        pos_row_entropy = F.softmax(pos_align, dim=-1) * F.log_softmax(pos_align, dim=-1)
        neg_row_entropy = F.softmax(neg_align, dim=-1) * F.log_softmax(neg_align, dim=-1)
        pos_row_entropy = -1 * pos_row_entropy.sum()
        neg_row_entropy = -1 * neg_row_entropy.sum()
        pos_col_entropy = F.softmax(pos_align, dim=0) * F.log_softmax(pos_align, dim=0)
        neg_col_entropy = F.softmax(neg_align, dim=0) * F.log_softmax(neg_align, dim=0)
        pos_col_entropy = -1 * pos_col_entropy.sum()
        neg_col_entropy = -1 * neg_col_entropy.sum()
        entropy_norm = pos_row_entropy - neg_row_entropy + pos_col_entropy - neg_col_entropy
        if self.aggregation == 'max':
            pos_align_score, neg_align_score = torch.max(pos_align, -1)[0], torch.max(neg_align, -1)[0]
        elif self.aggregation == 'sum':
            pos_align_score, neg_align_score = torch.sum(pos_align, -1), torch.sum(neg_align, -1)
            pos_align_score = torch.div(pos_align_score, src_lengths.float().reshape((-1, 1)))
            neg_align_score = torch.div(neg_align_score, src_lengths.float().reshape((-1, 1)))
        elif self.aggregation == 'match':
            # Score each example by its maximum-weight bipartite matching.
            pos_align_score = 0
            pos_matrix = [x.detach().cpu().numpy() for x in pos_align]
            pos_assignment_positions = [self.bipartite_graph_solver.find_max(x)[1] for x in pos_matrix]
            for idx, pos_assignment_position in enumerate(pos_assignment_positions):
                for x, y in zip(*pos_assignment_position):
                    pos_align_score += pos_align[idx, x, y]
            pos_align_score /= sum(positive_lengths[0])
            neg_align_score = 0
            neg_matrix = [x.detach().cpu().numpy() for x in neg_align]
            neg_assignment_positions = [self.bipartite_graph_solver.find_max(x)[1] for x in neg_matrix]
            for idx, neg_assignment_position in enumerate(neg_assignment_positions):
                for x, y in zip(*neg_assignment_position):
                    neg_align_score += neg_align[idx, x, y]
            neg_align_score /= sum(negative_lengths[0])
        else:
            # BUG FIX: the message omitted the supported 'match' mode.
            raise ValueError("Hinge loss only supports max/sum/match aggregation.")
        pos_align_score = torch.sum(pos_align_score, -1)
        neg_align_score = torch.sum(neg_align_score, -1)
        pos_align_score = torch.div(pos_align_score, pos_tgt_lengths.float())
        neg_align_score = torch.div(neg_align_score, neg_tgt_lengths.float())
        hinge_loss = torch.mean(torch.clamp(self.margin - (pos_align_score - neg_align_score), min=0.0)) + \
            self.l1_norm_weight * (pos_l1_norm + neg_l1_norm) + self.entropy_norm_weight * entropy_norm
        return hinge_loss
|
ContextualSP/interactive_text_to_sql/src/loss.py/0
|
{
"file_path": "ContextualSP/interactive_text_to_sql/src/loss.py",
"repo_id": "ContextualSP",
"token_count": 2658
}
| 227 |
import glob
import os
from abc import ABCMeta, abstractproperty, abstractmethod
from collections import Sequence
from os.path import join
import tensorflow as tf
from keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard
import keras.engine
from tensorflow import Tensor
from gtd.io import JSONPicklable, makedirs
class Batch(Sequence, metaclass=ABCMeta):
    """An immutable Sequence of Example objects."""

    @abstractproperty
    def uid(self):
        """An integer that uniquely identifies this batch."""
        pass

    def __hash__(self):
        # Hash and equality both delegate to uid, so batches can be used as
        # dict keys and set members.
        return hash(self.uid)

    def __eq__(self, other):
        return isinstance(other, Batch) and self.uid == other.uid
class Model(object):
    """A Model encapsulates a network of TensorFlow operations.

    Each Model typically implements some modular and reusable functionality, e.g. "feed forward network"
    or "LSTM" or "neural attention". A full system is constructed by composing together several Models to form one
    large Model. This base class carries no behavior; it only marks the type.
    """
    pass
class Feedable(Model, metaclass=ABCMeta):
    """A Model that can be fed plain old Python objects (e.g. a list of strings) as input.

    A Feedable defines a function which converts input objects into numpy arrays, which can then be passed into the
    TensorFlow computation graph.
    """

    @abstractmethod
    def inputs_to_feed_dict(self, *args, **kwargs):
        """Convert inputs into a feed_dict that can be fed into Session.run.

        Args:
            args, kwargs: input arguments to this model.

        Returns:
            dict[Tensor, np.array]: a feed_dict is a dict mapping placeholders to their assignments (numpy arrays).
        """
        pass

    @classmethod
    def inputs_to_feed_dict_union(cls, models, *args, **kwargs):
        """Convenience method for merging the feed_dicts of several models which all take the same inputs.

        Args:
            models (list[Feedable])
        """
        feed_dict = {}
        for model in models:
            # On placeholder collisions, later models overwrite earlier ones.
            feed_dict.update(model.inputs_to_feed_dict(*args, **kwargs))
        return feed_dict

    def compute(self, fetch, *args, **kwargs):
        """Compute outputs, given inputs.

        Uses the current default Session for execution.

        Args:
            fetch: anything that can be fetched by Session.run.
            args, kwargs: input arguments, matching the arguments passed to feed_dict

        Returns:
            the result of Session.run
        """
        # TF1-style execution: relies on a globally registered default session.
        sess = tf.get_default_session()
        if sess is None:
            raise ValueError('No default TensorFlow Session registered.')
        feed = self.inputs_to_feed_dict(*args, **kwargs)
        results = sess.run(fetch, feed_dict=feed)
        return results
class Optimizable(Model, metaclass=ABCMeta):
    """A Model with a differentiable objective function."""

    @abstractproperty
    def objective_tensor(self):
        """A scalar Tensor that we will take gradients with respect to."""
        pass

    @property
    def gradients(self):
        """A map from Variable Tensors to their gradient Tensors.

        Computed lazily on first access and cached on the instance
        (the except-AttributeError branch runs exactly once).
        """
        try:
            return self._var_to_grad
        except AttributeError:
            optimizer = tf.train.GradientDescentOptimizer(0.01)  # we merely use this optimizer to identify gradients
            # Variables with a None gradient do not affect the objective.
            self._var_to_grad = {v: g for g, v in optimizer.compute_gradients(self.objective_tensor) if g is not None}
            return self._var_to_grad

    @property
    def variables(self):
        """The set of variables which affect the objective_tensor."""
        return set(self.gradients.keys())
class KerasModel(Feedable):
    """A Model that can be trained with Keras.

    A KerasModel explicitly declares its `output_tensors` and input `placeholders`.

    Using Keras:
    - Setup
        - Remember to configure Keras to use the TensorFlow backend
        - If you use Keras layers, you MUST bind Keras to a TensorFlow session before constructing layers.
        - see [this](https://blog.keras.io/keras-as-a-simplified-interface-to-tensorflow-tutorial.html) for more info.
    - Note that Keras Input layers return plain old TensorFlow placeholders
    - When initializing variables, do NOT use tf.initialize_all_variables(). This will overwrite the initialization
      performed by Keras. Instead, use the `gtd.ml.utils.guarantee_initialized_variables` function.
    - If you plan to use the KerasTrainer, your ENTIRE model must use Keras Layers from beginning to end. You cannot
      intersperse with TF Operations (Keras needs to propagate its own metadata).
    """

    @abstractproperty
    def placeholders(self):
        """Placeholders owned by this Model.

        Returns:
            list[Tensor]
        """
        pass

    @classmethod
    def placeholders_union(cls, models):
        """Convenience method for merging the placeholders of several models.

        Args:
            models (list[KerasModel])

        Returns:
            list[Tensor]: concatenation in model order (duplicates kept).
        """
        phs = []
        for model in models:
            phs.extend(model.placeholders)
        return phs

    @abstractproperty
    def output_tensors(self):
        """Outputs of this model.

        Returns:
            list[Tensor]: a list of Tensors.
        """
        pass
class KerasObjective(KerasModel, metaclass=ABCMeta):
    """Specifies the loss functions for training a model, as well as how to assign values to label Placeholders."""

    @abstractproperty
    def losses(self):
        """List of losses.

        Returns:
            list[(Tensor, Tensor, Tensor)]: a list of (label, objective, metric) triples.
                e.g. (some_tensor, 'sparse_categorical_crossentropy', 'accuracy')
        """
        pass
class KerasTrainer(object):
    def __init__(self, model, objective, optimizer, batch_size, save_dir):
        """Create a KerasTrainer.

        Responsible for training, checkpointing weights, and restoring weights from disk.

        Args:
            model (KerasModel)
            objective (KerasObjective)
            optimizer: optimizer for Keras
            batch_size (int)
            save_dir (str)
        """
        self.model = model
        self.objective = objective
        self._batch_size = batch_size
        self._save_dir = save_dir
        # Unzip the (label, objective, metric) triples declared by the objective.
        labels, objectives, metrics = [list(seq) for seq in zip(*objective.losses)]
        self.inputs = model.placeholders
        self.outputs = labels
        with tf.name_scope('keras_trainer'):
            # Legacy Keras 1.x API: `input`/`output` kwargs (later renamed to
            # inputs/outputs).
            keras_model = keras.engine.Model(input=self.inputs, output=self.outputs)
            keras_model.compile(optimizer=optimizer, loss=objectives, metrics=metrics)
            self.keras_model = keras_model
    @property
    def batch_size(self):
        # Read-only view of the batch size configured at construction time.
        return self._batch_size
    def _vectorized_batches(self, batches):
        """Convert an iterable of Batches into (X, Y) pairs for fit_generator.

        Args:
            batches (Iterable[Batch])

        Yields:
            (X, Y): positional lists of numpy arrays ordered to match
            self.inputs and self.outputs respectively.
        """
        for batch in batches:
            feed_x = self.model.inputs_to_feed_dict(batch)
            feed_y = self.objective.inputs_to_feed_dict(batch)
            # Keras wants positional lists, not placeholder->array feed_dicts.
            X = [feed_x[i] for i in self.inputs]
            Y = [feed_y[o] for o in self.outputs]
            yield X, Y
def train(self, train_batches, valid_batches, samples_per_epoch, nb_epoch, nb_val_samples, extra_callbacks=None):
"""Train the model.
Automatically adds the following Keras callbacks:
- ModelCheckpoint
- EarlyStopping
- TensorBoard
Args:
train_batches (Iterable[Batch]): an iterable of training Batches
valid_batches (Iterable[Batch]): an iterable of validation Batches
samples_per_epoch (int)
nb_epoch (int): max number of epochs to train for
nb_val_samples (int): number of samples for validation
extra_callbacks (list): a list of additional Keras callbacks to run
"""
checkpoint_path = join(self.checkpoint_dir, 'weights.{epoch:02d}-{val_loss:.2f}.hdf5')
checkpointer = ModelCheckpoint(checkpoint_path, verbose=1, save_best_only=False)
early_stopper = EarlyStopping(monitor='val_loss', patience=2, verbose=1)
tboard = TensorBoard(self.tensorboard_dir, write_graph=False)
callbacks = [checkpointer, early_stopper, tboard]
if extra_callbacks:
callbacks.extend(extra_callbacks)
train = self._vectorized_batches(train_batches)
valid = self._vectorized_batches(valid_batches)
self.keras_model.fit_generator(train, samples_per_epoch, nb_epoch,
callbacks=callbacks,
validation_data=valid, nb_val_samples=nb_val_samples
)
@property
def save_dir(self):
return self._save_dir
@classmethod
def get_checkpoint_dir(cls, save_dir):
return join(save_dir, 'checkpoints')
@classmethod
def get_tensorboard_dir(cls, save_dir):
return join(save_dir, 'tensorboard')
@property
def checkpoint_dir(self):
p = self.get_checkpoint_dir(self.save_dir)
makedirs(p)
return p
@property
def tensorboard_dir(self):
p = self.get_tensorboard_dir(self.save_dir)
makedirs(p)
return p
@classmethod
def get_checkpoint_paths(cls, save_dir):
checkpoint_dir = cls.get_checkpoint_dir(save_dir)
pattern = join(checkpoint_dir, '*.hdf5')
return list(glob.iglob(pattern))
@property
def latest_checkpoint_path(self):
checkpoint_paths = self.get_checkpoint_paths(self.save_dir)
latest = max(checkpoint_paths, key=os.path.getctime)
return latest
def load_weights(self, path):
self.keras_model.load_weights(path)
|
ContextualSP/lemon/executor/gtd/ml/framework.py/0
|
{
"file_path": "ContextualSP/lemon/executor/gtd/ml/framework.py",
"repo_id": "ContextualSP",
"token_count": 4098
}
| 228 |
import numpy as np
import pytest
from gtd.ml.vocab import SimpleVocab, SimpleEmbeddings
@pytest.fixture
def vocab():
    """A minimal three-word vocabulary fixture."""
    words = ['a', 'b', 'c']
    return SimpleVocab(words)
@pytest.fixture
def embeds(vocab):
    """Identity-matrix embeddings over the `vocab` fixture (one-hot rows)."""
    identity = np.eye(len(vocab))
    return SimpleEmbeddings(identity, vocab)
class TestSimpleVocab(object):
    def test_save_load(self, vocab, tmpdir):
        """Round-trip: a vocab saved to disk loads back equal."""
        vocab_path = str(tmpdir.join('vocab.txt'))
        vocab.save(vocab_path)
        reloaded = SimpleVocab.load(vocab_path)
        assert vocab == reloaded
|
ContextualSP/lemon/executor/gtd/tests/ml/test_vocab.py/0
|
{
"file_path": "ContextualSP/lemon/executor/gtd/tests/ml/test_vocab.py",
"repo_id": "ContextualSP",
"token_count": 214
}
| 229 |
from collections import Sequence
import sys
from gtd.io import JSONPicklable
from gtd.utils import cached_property, UnicodeMixin
from strongsup.predicate import Predicate
from strongsup.utils import PredicateList
from strongsup.value import Value
from strongsup.world import World
class Example(JSONPicklable):
    """An input context paired with the correct answer.

    Args:
        context (BaseContext)
        answer (list[Value]): target answer
        logical form (list [Predicate]): target logical form
    """
    def __init__(self, context, answer=None, logical_form=None):
        assert isinstance(context, BaseContext)
        self._context = context
        # Validation only runs when a non-empty value is supplied; the
        # attribute itself is always assigned (possibly None).
        if answer:
            assert all(isinstance(x, Value) for x in answer)
        self._answer = answer
        if logical_form:
            assert all(isinstance(x, Predicate) for x in logical_form)
        self._logical_form = logical_form
    @property
    def context(self):
        # BaseContext: the input context.
        return self._context
    @property
    def answer(self):
        """The correct answer to the question, as a list of Values.

        Returns:
            list[Value]
        """
        return self._answer
    @property
    def logical_form(self):
        """The correct logical form for the example.

        A list of Predicates.

        Raises:
            AttributeError, if no logical form present

        Returns:
            list[Predicate]
        """
        # NOTE(review): _logical_form is always assigned in __init__, so the
        # documented AttributeError does not appear reachable -- confirm
        # whether the docstring or the assignment reflects the intent.
        return self._logical_form
    def __getstate__(self):
        # Serialized as the (context, answer, logical_form) triple
        # (JSONPicklable protocol); __setstate__ rebuilds via __init__.
        return self.context, self.answer, self.logical_form
    def __setstate__(self, state):
        context, answer, logical_form = state
        self.__init__(context, answer, logical_form)
class Utterance(Sequence, UnicodeMixin):
    # Sequence provides iteration/indexing over tokens via __getitem__ and
    # __len__; UnicodeMixin derives __str__ from __unicode__.
    __slots__ = ['_tokens', '_context', '_utterance_idx', '_predicates', '_predicate_alignments']
    def __init__(self, tokens, context, utterance_idx, predicate_alignments):
        """Create an Utterance.

        Args:
            tokens (tuple[unicode] | list[unicode]): list of words
            context (Context): context that this utterance belongs to
            utterance_idx (int): index of this utterance in context.utterances
            predicate_alignments (dict[Predicate, list[(int, float)]]): a map from predicates to alignments.
        """
        assert isinstance(tokens, list) or isinstance(tokens, tuple)
        if len(tokens) > 0:
            assert isinstance(tokens[0], str)
        self._tokens = tokens
        self._context = context
        self._utterance_idx = utterance_idx
        # compute allowable predicates and their alignments with the utterance
        self._predicate_alignments = predicate_alignments
        self._predicates = PredicateList(sorted(self._predicate_alignments.keys()))
    def __getitem__(self, i):
        # Token access; with __len__ this satisfies the Sequence protocol.
        return self._tokens[i]
    def __len__(self):
        return len(self._tokens)
    @property
    def context(self):
        # BaseContext: the context this utterance belongs to.
        return self._context
    @property
    def utterance_idx(self):
        # int: position of this utterance within its context.
        return self._utterance_idx
    @property
    def _id(self):
        """An ID that uniquely identifies the utterance"""
        return (self.context, self.utterance_idx)
    def __hash__(self):
        # Hash/eq are both derived from _id, keeping them consistent.
        return hash(self._id)
    def __eq__(self, other):
        # NOTE(review): raises AttributeError when `other` has no _id --
        # presumably comparisons only ever happen between utterances; confirm
        # before relying on == against arbitrary objects.
        return other._id == self._id
    def __unicode__(self):
        return ' '.join(self._tokens)
    @property
    def predicates(self):
        """All allowable predicates for this utterance.

        CandidateGenerator uses this to generate candidates

        Returns:
            PredicateList (similar to list[Predicate] but with fast index lookup)
        """
        return self._predicates
    @property
    def predicate_alignments(self):
        # dict[Predicate, list[(int, float)]]: the raw alignment map.
        return self._predicate_alignments
    def predicate_alignment(self, predicate):
        """Return the alignment between the specified predicate and utterance (for soft copying)

        Args:
            predicate (Predicate)

        Returns:
            list[(utterance token index, alignment strength)]
                utterance token index is an int in range(len(utterance))
                alignment strength is a float between 0 and 1, inclusive
            Predicates with no recorded alignment yield an empty list.
        """
        if predicate not in self._predicate_alignments:
            #print >> sys.stderr, u'WARNING: {} not in matched predicates! [{}; {}]'.format(
            #    predicate, u' '.join(self._tokens), self.context.world)
            return []
        return self._predicate_alignments[predicate]
class DelexicalizedUtterance(Utterance):
    """An Utterance whose aligned phrases were collapsed into placeholder tokens."""
    # FIX: declare '_original_utterance' too -- __init__ assigns it, but the
    # original __slots__ listed only '_placeholder_positions' (it worked only
    # because a slot-less base class supplies a __dict__).
    __slots__ = ['_placeholder_positions', '_original_utterance']
    def __init__(self, tokens, context, utterance_idx, predicate_alignments, placeholder_positions, orig_utterance):
        """Create a DelexicalizedUtterance.

        Args:
            tokens (list[unicode]): delexicalized token sequence
            context (Context): context this utterance belongs to
            utterance_idx (int): index of this utterance in context.utterances
            predicate_alignments (dict[Predicate, list[(int, float)]])
            placeholder_positions (dict[Predicate, list[int]]): where each
                predicate's placeholder landed in `tokens`
            orig_utterance (Utterance): the pre-delexicalization utterance
        """
        self._placeholder_positions = placeholder_positions
        self._original_utterance = orig_utterance
        super(DelexicalizedUtterance, self).__init__(tokens, context, utterance_idx, predicate_alignments)
    @property
    def original_utterance(self):
        # Utterance: the utterance this one was delexicalized from.
        return self._original_utterance
    @property
    def placeholder_positions(self):
        """A dict mapping from a Predicate to the list of positions in the delex'd utterance where it appears.

        Returns:
            dict[Predicate, list[int]]
        """
        return self._placeholder_positions
################################
# Context
class BaseContext(UnicodeMixin):
    """Shared base for Context and DelexicalizedContext: a World plus its utterances."""
    def __init__(self, world, utterances):
        """Initialize a Context.

        Args:
            world (World)
            utterances (list[Utterance])
        """
        assert isinstance(world, World)
        self._world = world
        self._utterances = utterances
        # Pool the allowable predicates across every utterance.
        all_predicates = set()
        for utterance in self._utterances:
            all_predicates.update(utterance.predicates)
        self._predicates = PredicateList(sorted(all_predicates))
        self._silver_logical_form = None
    @property
    def world(self):
        """World: the world this context is grounded in."""
        return self._world
    @property
    def utterances(self):
        """list[Utterance]: the utterances of this context."""
        return self._utterances
    @property
    def predicates(self):
        """The union of the allowable predicates for each utterance.

        CandidateGenerator uses this to generate candidates.

        Returns:
            PredicateList (similar to list[Predicate] but with fast index lookup)
        """
        return self._predicates
    @property
    def silver_logical_form(self):
        """ParsePath for the highest-probability logical form generated for
        this context that executes to the correct denotation, or None."""
        return self._silver_logical_form
    @property
    def executor(self):
        """Executor: the world's executor."""
        return self._world.executor
    def __unicode__(self):
        rendered = [str(utterance) for utterance in self.utterances]
        return '\n'.join(rendered)
class Context(BaseContext):
    """The necessary and sufficient information to answer a query utterance."""
    def __init__(self, world, raw_utterances):
        """Initialize a Context.

        Args:
            world (World)
            raw_utterances (list[list[unicode]])
        """
        assert isinstance(raw_utterances, list), raw_utterances
        assert isinstance(raw_utterances[0], list), raw_utterances[0]
        assert isinstance(raw_utterances[0][0], str), raw_utterances[0][0]
        # Align predicates against each raw token list, then wrap each one in
        # an Utterance bound to this (still-initializing) Context.
        utterances = []
        for idx, raw_tokens in enumerate(raw_utterances):
            alignments = dict(
                world.predicates_computer.compute_predicates(raw_tokens))
            utterances.append(Utterance(raw_tokens, self, idx, alignments))
        super(Context, self).__init__(world, utterances)
class DelexicalizedContext(BaseContext):
    """A BaseContext whose utterances have had aligned phrases collapsed into
    placeholder tokens (see _delexicalize_utterance)."""
    def __init__(self, context):
        # Wrap an existing (lexicalized) Context.
        self._original_context = context
        utterances = context.utterances
        delex_utterances = [self._delexicalize_utterance(utt) for utt in utterances]
        super(DelexicalizedContext, self).__init__(context.world, delex_utterances)
    @property
    def original_context(self):
        # Context: the pre-delexicalization context.
        return self._original_context
    def _delexicalize_utterance(self, utt):
        """Compute the delexicalized version of the utterance.

        Args:
            utt (Utterance): the original utterance

        Some phrases are collapsed into placeholders strings.
        These strings are derived from predicate.delexicalized_name
        and conventionally begin with an uppercase letter.

        Delexicalization uses this strategy:
        - Sort aligned predicates by score (sum of alignment weights)
        - Starting from higher scores, mark out the utterance tokens
          that each predicate is aligned to.

        The set of predicates on the utterance remain the same.
        The predicate alignment positions are now relative
        to the new delexicalized utterance. Alignment strengths
        to the collapsed tokens are averaged out.
        """
        if isinstance(utt, DelexicalizedUtterance):
            raise ValueError('Already delexicalized.')
        # Sort the predicates by heuristic scores
        aligned_predicates = []  # (predicate, alignment, score)
        for predicate, alignment in utt.predicate_alignments.items():
            # Ignore some predicates (unaligned or should not be delexicalized)
            if not alignment or predicate.delexicalized_name is None:
                continue
            # Compute the clean alignment (only use the exact-matched portion)
            clean_alignment = [
                index for (index, strength) in alignment
                if strength == 1.0]
            # Cut into contiguous segments
            clean_segments = []
            for x in clean_alignment:
                if not clean_segments or x != clean_segments[-1][-1] + 1:
                    clean_segments.append([x])
                else:
                    clean_segments[-1].append(x)
            #score = sum(strength for (_, strength) in alignment)
            # Score each segment by its length: longer exact matches win.
            for segment in clean_segments:
                aligned_predicates.append((predicate, segment, len(segment)))
        aligned_predicates.sort(key=lambda x: -x[2])
        # Greedily replace utterance tokens with placeholders
        replacements = [False] * len(utt)
        for predicate, segment, score in aligned_predicates:
            # Avoid overlap
            if any(replacements[index] for index in segment):
                continue
            for index in segment:
                replacements[index] = predicate
        # Compute the delexicalized utterance
        tokens = []
        placeholder_positions = {}
        old_to_new_indices = []
        last_replacement = None
        for token, replacement in zip(utt, replacements):
            if not replacement:
                tokens.append(token)
            elif replacement != last_replacement:
                # First token of a replaced span: emit its placeholder once.
                placeholder_positions\
                    .setdefault(replacement, []).append(len(tokens))
                tokens.append(replacement.delexicalized_name)
            # Every original token index maps to its position in the new
            # token list; tokens inside a collapsed span all map to the
            # placeholder's position.
            old_to_new_indices.append(len(tokens) - 1)
            last_replacement = replacement
        # Compute predicate_alignments
        predicate_alignments = {}
        for predicate, old_alignment in utt.predicate_alignments.items():
            if not old_alignment:
                predicate_alignments[predicate] = old_alignment
            else:
                new_alignment = {}
                for index, strength in old_alignment:
                    new_index = old_to_new_indices[index]
                    new_alignment.setdefault(new_index, []).append(strength)
                # Strengths of tokens collapsed onto one position are averaged.
                predicate_alignments[predicate] = [
                    (index, sum(strengths) / len(strengths))
                    for (index, strengths) in new_alignment.items()]
            # Add placeholder positions for reversed relations
            if predicate.name[0] == '!':
                for x in placeholder_positions:
                    if x.name == predicate.name[1:]:
                        placeholder_positions[predicate] = \
                            placeholder_positions[x]
                        break
        return DelexicalizedUtterance(tokens, self, utt.utterance_idx, predicate_alignments,
                                      placeholder_positions, utt)
|
ContextualSP/lemon/executor/strongsup/example.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/example.py",
"repo_id": "ContextualSP",
"token_count": 5232
}
| 230 |
import abc
import os
import pickle
import time
import sys
from dependency.data_directory import DataDirectory
from prettytable import PrettyTable
from strongsup.results.entry import Entry
from strongsup.results.result_value import ResultValue
class Tracker(object, metaclass=abc.ABCMeta):
    """Tracks a set of a results. In charge of maintaining up to date
    results for each Entry.

    Args:
        name (string): name of this tracker
        parent (Tracker): a tracker or None
    """
    def __init__(self, name, parent=None):
        self._name = name
        self._parent = parent
        self._load()  # Load sub-trackers or entries
    @property
    def name(self):
        """string: name of this tracker."""
        return self._name
    @abc.abstractmethod
    def merge(self, other):
        """Merges two trackers together.

        Args:
            other (Tracker): the other tracker
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def _load(self):
        """Loads the Tracker object from somewhere, generally from file"""
        raise NotImplementedError()
    def _match(self, x, filters=None):
        """Returns true iff x's name substring matches
        one of the filters OR filters is None

        Args:
            x: something with a name property
            filters (list[string]): the filters

        Returns:
            bool: if there's a match
        """
        if not filters:
            return True
        # Generator (not a list) lets any() short-circuit on the first hit;
        # `filt in x.name` is the idiomatic substring test, equivalent to
        # the original x.name.find(filt) != -1.
        return any(filt in x.name for filt in filters)
    def __str__(self):
        return "Tracker({})".format(self.name)
    __repr__ = __str__
class TopLevelTracker(Tracker):
    """Tracker over several datasets.

    Persists its sub-trackers and job lists to a pickle file, and refreshes
    in-progress results from TensorBoard event logs.
    """
    def __init__(self, name, parent=None):
        super(TopLevelTracker, self).__init__(name, parent)
    def entries(self, dataset_filters=None, experiment_type_filters=None):
        """Returns all entries that substring match strings in
        experiment_type_filters

        Args:
            dataset_filters (list[string]): the substrings to match datasets
                on, None matches everything.
            experiment_type_filters (list[string]): the substrings to match,
                None matches everything

        Returns:
            list[Entry]: all matching entries
        """
        filter_fn = lambda x: self._match(x, dataset_filters)
        trackers = list(filter(filter_fn, iter(self._trackers.values())))
        entries = []
        for tracker in trackers:
            entries.extend(tracker.entries(experiment_type_filters))
        return entries
    def add_result(self, dataset, experiment_type, seed, result_value):
        """Adds a result associated with this dataset, experiment_type and
        seed

        Args:
            dataset (string)
            experiment_type (ExperimentType)
            seed (int)
            result_value (ResultValue)
        """
        tracker = self._trackers.setdefault(
            dataset, LeafTracker(dataset, self))
        tracker.add_result(experiment_type, seed, result_value)
    def _update_result(self, dataset, experiment_type, seed, result_value):
        """Should not get called externally."""
        tracker = self._trackers.setdefault(
            dataset, LeafTracker(dataset, self))
        tracker._update_result(experiment_type, seed, result_value)
    def merge(self, other):
        """Merge another TopLevelTracker's trackers and job lists into this one."""
        for dataset, tracker in other._trackers.items():
            self._trackers.setdefault(
                dataset, LeafTracker(dataset)).merge(tracker)
        self._running_jobs.extend(other._running_jobs)
        self._complete_jobs.extend(other._complete_jobs)
    def refresh_result(self, dataset, experiment_type, seed, path):
        """Re-fetches the result at this path. Marks the experiment
        as in-progress again.

        Args:
            dataset (string): the dataset of the result
            experiment_type (ExperimentType): the experiment type of result
            seed (int): seed of result
            path (string): filesystem path of experiment directory
        """
        # Passing last_accessed=None forces an unconditional re-read.
        success, result, access = self._fetch_result(path, None)
        assert success
        self._update_result(dataset, experiment_type, seed, result)
        self.register_result(dataset, experiment_type, seed, path)
    def register_result(self, dataset, experiment_type, seed, path):
        """Registers a result to be loaded next time.

        Args:
            dataset (string): the dataset of the result
            experiment_type (ExperimentType): the experiment type of result
            seed (int): seed of result
            path (string): filesystem path of experiment directory
        """
        self._running_jobs.append(
            JobMetadata(dataset, experiment_type, seed, path))
    def __enter__(self):
        return self
    def __exit__(self, ex_type, ex_value, traceback):
        """Writes _trackers and _running_jobs to file on clean exit"""
        # Clean exit
        if ex_type is None and ex_value is None and traceback is None:
            # BUG FIX: pickle requires a binary-mode file; the original
            # text-mode 'w+' makes pickle.dump raise TypeError on Python 3.
            with open(self.filename, 'wb') as f:
                pickle.dump((self._trackers, self._running_jobs,
                             self._complete_jobs), f)
    def _load(self):
        if not os.path.exists(self.filename):
            self._trackers = {}  # name (string) --> Tracker
            self._running_jobs = []  # List of jobs to fetch from
            self._complete_jobs = []  # List of complete jobs
            return
        # BUG FIX: read the pickle in binary mode ('rb'); text mode would
        # try to decode the pickle bytes as text and fail.
        with open(self.filename, 'rb') as f:
            self._trackers, self._running_jobs, self._complete_jobs = pickle.loads(f.read())
        self._refresh_results()
        if len(self._running_jobs) != 0:
            warn("There are still running jobs or dead jobs: {}".format(self._running_jobs))
            warn("You should probably not merge this tracker")
    def _refresh_results(self):
        """Fetches all of the running jobs"""
        to_remove = []
        for index, job in enumerate(self._running_jobs):
            accessed, result, timestamp = self._fetch_result(
                job.path, job.last_accessed)
            if not accessed:
                # NOTE(review): _fetch_result returns timestamp 0 for
                # "already up to date" and None for "no readable results
                # (job presumed dead)", yet this removes the timestamp == 0
                # case -- confirm the intended semantics before changing.
                if timestamp == 0:
                    to_remove.append(index)
            else:
                job.last_accessed = timestamp
                self._update_result(
                    job.dataset, job.experiment_type, job.seed, result)
        # Remove jobs that are dead
        for index in reversed(to_remove):
            job = self._running_jobs.pop(index)
            job.last_accessed = None
            self._complete_jobs.append(job)
    def _fetch_result(self, exp_path, last_accessed):
        """Fetches the most up to date results if last_accessed is earlier
        than the events file timestamp.

        Args:
            exp_path (string): the path to experiment directory
            last_accessed (float): the time in seconds since file was last
                accessed, None for never

        Returns:
            bool: if the result was accessed again
            ResultValue: the new result if accessed, otherwise None
            float: the new last accessed time
        """
        # Local import: event_accumulator lives in a TF-internal module and
        # is expensive to load; keep it off the module import path.
        from tensorflow.python.summary import event_accumulator as ea
        KEYS = [
            'VALID_denoAcc_silent_1utts_1',
            'VALID_denoAcc_silent_2utts_1',
            'VALID_denoAcc_silent_3utts_1',
            'VALID_denoAcc_silent_4utts_1',
            'VALID_denoAcc_silent_5utts_1',
            'FINAL_denoAcc_silent_1utts_1',
            'FINAL_denoAcc_silent_2utts_1',
            'FINAL_denoAcc_silent_3utts_1',
            'FINAL_denoAcc_silent_4utts_1',
            'FINAL_denoAcc_silent_5utts_1',
        ]
        events_file = exp_path + "/tensorboard"
        # Last accessed is up to date
        if (last_accessed is not None and
                os.path.getmtime(exp_path) <= last_accessed):
            return False, None, 0
        last_accessed = time.time()
        print('Reading from', events_file,
              '(could take a while ...)', file=sys.stderr)
        # size_guidance={SCALARS: 0} keeps every scalar event (no down-sampling).
        acc = ea.EventAccumulator(events_file, size_guidance={ea.SCALARS: 0})
        acc.Reload()
        available_keys = set(acc.Tags()['scalars'])
        values = []
        for key in KEYS:
            # Key not available to load yet
            if key not in available_keys:
                warn("No results found for {}".format(exp_path))
                print("Perhaps your job has died?")
                return False, None, None
            if key in available_keys:
                values.append([scalar.value for scalar in acc.Scalars(key)])
        # Transpose: one tuple of 10 metric values per logged step.
        values = list(zip(*values))
        if len(values) == 0:
            assert False
        # Pick the step with the best total metric value.
        best_index, best_value = max(
            [(i, sum(value)) for i, value in enumerate(values)],
            key=lambda x: x[1])
        return True, ResultValue(list(values[best_index][:5]),
                                 list(values[best_index][5:])), last_accessed
    @property
    def datasets(self):
        """Iterator over the tracked dataset names."""
        return iter(self._trackers.keys())
    @property
    def filename(self):
        """string: path of this tracker's pickle file."""
        return DataDirectory.results + "/" + self.name + ".trk"
    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm no caller uses them as dict keys.
        if not isinstance(other, TopLevelTracker):
            return False
        return self._trackers == other._trackers and self.name == other.name
class LeafTracker(Tracker):
    """A Tracker typically in charge of a single Dataset

    Args:
        name (string): the name (typically the dataset)
        parent (Tracker): A TopLevelTracker
    """
    def __init__(self, name, parent=None):
        # super().__init__ invokes self._load(), which is a no-op for
        # LeafTracker, so assigning _entries afterwards is safe.
        super(LeafTracker, self).__init__(name, parent)
        self._entries = {}  # ExperimentType --> Entry
    def entries(self, experiment_type_filters=None):
        """Returns all entries that substring match strings in
        experiment_type_filters

        Args:
            experiment_type_filters (list[string]): the substrings to match,
                None matches everything

        Returns:
            list[Entry]: all matching entries
        """
        filter_fn = lambda entry: self._match(entry, experiment_type_filters)
        entries = list(filter(filter_fn, iter(self._entries.values())))
        return entries
    def add_result(self, experiment_type, seed, result_value):
        """Adds the result value associated with this experiment type and
        seed to the Tracker.

        Args:
            experiment_type (ExperimentType)
            seed (int)
            result_value (ResultValue): the result
        """
        entry = self._entries.setdefault(experiment_type,
                                         Entry(experiment_type))
        entry.add_seed(seed, result_value)
    def _update_result(self, experiment_type, seed, result_value):
        """Should not get called externally."""
        entry = self._entries.setdefault(experiment_type,
                                         Entry(experiment_type))
        entry.update_seed(seed, result_value)
    def merge(self, other):
        # Unseen experiment types are adopted wholesale; for shared types,
        # seeds present on both sides keep the larger value (per ResultValue
        # ordering), and new seeds are added.
        for (experiment_type, entry) in other._entries.items():
            if experiment_type not in self._entries:
                self._entries[experiment_type] = entry
            else:
                for seed in entry.seeds:
                    if self._entries[experiment_type].contains_seed(seed):
                        best_result = max(
                            [self._entries[experiment_type].get_value(seed),
                             entry.get_value(seed)])
                        self._entries[experiment_type].update_seed(
                            seed, best_result)
                    else:
                        self._entries[experiment_type].add_seed(
                            seed, entry.get_value(seed))
    def _load(self):
        # TopLevelTrackers are responsible for loading this
        return
    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm no caller uses them as dict keys.
        if not isinstance(other, LeafTracker):
            return False
        return self._entries == other._entries and self.name == other.name
class JobMetadata(object):
    """Light-weight struct for maintaining info about running jobs"""
    def __init__(self, dataset, experiment_type, seed, path, last_accessed=None):
        """
        Args:
            dataset (string)
            experiment_type (ExperimentType)
            seed (int)
            path (string): filesystem path of the experiment directory
            last_accessed (float): timestamp of the last result fetch, or None
        """
        self.dataset = dataset
        self.experiment_type = experiment_type
        self.seed = seed
        self.path = path
        self.last_accessed = last_accessed
    def __getstate__(self):
        """Sets the last_accessed to None when pickling, to be platform
        independent. The epoch in OS X is different than the epoch in Linux
        distros"""
        # BUG FIX: the original pickled self.last_accessed, contradicting the
        # docstring above; a timestamp from another OS could then wrongly
        # suppress result refreshes. Drop it as documented.
        return (self.dataset, self.experiment_type, self.seed, self.path, None)
    def __setstate__(self, state):
        dataset, experiment_type, seed, path, last_accessed = state
        self.__init__(dataset, experiment_type, seed, path, last_accessed)
    def __str__(self):
        return "JobMetadata({}, {}, {}, {}, {})".format(
            self.experiment_type, self.dataset, self.seed, self.path, self.last_accessed)
    __repr__ = __str__
def warn(msg):
    """Print msg to stdout framed by '=' banners so it stands out in logs."""
    banner = "=" * 10
    print("{}WARNING: {}{}".format(banner, msg, banner))
|
ContextualSP/lemon/executor/strongsup/results/tracker.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/results/tracker.py",
"repo_id": "ContextualSP",
"token_count": 5777
}
| 231 |
from gtd.utils import cached_property
from strongsup.executor import Executor, Denotation
from strongsup.predicate import Predicate
from strongsup.utils import EOU
from strongsup.value import Value
from strongsup.tables.structure import (
parse_number,
parse_date,
Date,
ensure_same_type,
InfiniteSet,
NeqInfiniteSet,
RangeInfiniteSet,
GenericDateInfiniteSet,
)
from strongsup.tables.graph import TablesKnowledgeGraph
from strongsup.tables.value import StringValue, NumberValue, DateValue
################################
# Naming Conventions

# Value literals are tagged by a one-character prefix.
NUMBER_PREFIX = 'N'
DATE_PREFIX = 'D'
# Named graph entities/relations use a Freebase-style 'fb:' prefix; a
# leading '!' marks a reversed relation.
NAME_PREFIX = 'fb:'
REVERSED_NAME_PREFIX = '!fb:'
ASSERT_PREFIX = 'assert-'
TYPE_ROW = 'type-row'
SPECIAL_BINARIES = ('!=', '<', '>', '<=', '>=')
AGGREGATES = ('count', 'min', 'max', 'sum', 'avg')
MERGES = ('and', 'or', 'diff')
BEGIN_GROWS = ('x',)
END_GROWS = ('argmin', 'argmax')
ALL_BUILT_INS = ((TYPE_ROW,)
                 + SPECIAL_BINARIES + AGGREGATES + MERGES + BEGIN_GROWS + END_GROWS)


def is_unary_name(x):
    """True iff x is a named unary (fb: prefix, exactly one dot)."""
    if not x.startswith(NAME_PREFIX):
        return False
    return x.count('.') == 1


def is_unary(x):
    """True iff x denotes a unary: a number (N...), date (D...), or unary name."""
    if x[0] in (NUMBER_PREFIX, DATE_PREFIX):
        return True
    return x.startswith(NAME_PREFIX) and x.count('.') == 1


def parse_unary(x):
    """Return the correct unary object if x represents a unary.
    Otherwise, return None."""
    if is_unary_name(x):
        return x
    if x.startswith(NUMBER_PREFIX):
        return parse_number(x[len(NUMBER_PREFIX):])
    if x.startswith(DATE_PREFIX):
        return parse_date(x[len(DATE_PREFIX):])
    return None


def is_binary_name(x):
    """True iff x is a named binary relation (fb: prefix, exactly two dots)."""
    if not x.startswith(NAME_PREFIX):
        return False
    return x.count('.') == 2


def is_reversed_binary_name(x):
    """True iff x is a reversed named binary relation ('!fb:', two dots)."""
    if not x.startswith(REVERSED_NAME_PREFIX):
        return False
    return x.count('.') == 2


def is_binary(x):
    """True iff x denotes a binary: a comparison or a (reversed) binary name."""
    if x in SPECIAL_BINARIES:
        return True
    named = x.startswith(NAME_PREFIX) or x.startswith(REVERSED_NAME_PREFIX)
    return named and x.count('.') == 2
################################
# Helper Decorators

def handle_dict_1arg(fn):
    """Decorator to support a 1-argument operation on dict

    Wraps a method fn(self, predicate, arg) so that when `arg` is a dict
    (a binary denotation), fn is applied to each value and a dict with the
    same keys is returned; otherwise fn is applied directly.
    """
    from functools import wraps  # local import: keeps the helper self-contained

    @wraps(fn)  # preserve fn's name/docstring on the wrapper for debugging
    def wrapped_fn(self, predicate, arg):
        if isinstance(arg, dict):
            answer = {}
            for key, things in arg.items():
                answer[key] = fn(self, predicate, things)
            return answer
        else:
            return fn(self, predicate, arg)
    # Keep a handle to the undecorated function (part of the original API).
    wrapped_fn.original_fn = fn
    return wrapped_fn
def handle_dict_2args(fn):
    """Decorator to support a 2-argument operation on dict(s)

    Wraps fn(self, predicate, arg1, arg2) so that dict arguments (binary
    denotations) are handled per key:
    - one dict + one plain value: apply fn to every dict value;
    - two dicts: apply fn keywise over the union of keys, substituting an
      empty set for a key missing on either side.
    """
    from functools import wraps  # local import: keeps the helper self-contained

    @wraps(fn)  # preserve fn's name/docstring on the wrapper for debugging
    def wrapped_fn(self, predicate, arg1, arg2):
        if isinstance(arg1, dict) or isinstance(arg2, dict):
            answer = {}
            if not isinstance(arg1, dict):
                for key, things in arg2.items():
                    answer[key] = fn(self, predicate, arg1, things)
            elif not isinstance(arg2, dict):
                for key, things in arg1.items():
                    answer[key] = fn(self, predicate, things, arg2)
            else:
                # Both are dicts
                for key in set(arg1) | set(arg2):
                    answer[key] = fn(self, predicate,
                                     arg1.get(key, set()), arg2.get(key, set()))
            return answer
        else:
            return fn(self, predicate, arg1, arg2)
    # Keep a handle to the undecorated function (part of the original API).
    wrapped_fn.original_fn = fn
    return wrapped_fn
################################
# Denotation

class TablesDenotation(list, Denotation):
    """A TablesDenotation is a stack of objects.

    Each object is either a set (unary) or a dict with sets as values (binary).
    See strongsup.tables.structure docstring for more details.
    For convenience during execution, TablesDenotation is mutable.
    """
    def __init__(self, *args):
        list.__init__(self, *args)
        # Copying another TablesDenotation inherits its utterance pointer;
        # a fresh stack starts at utterance 0.
        source = args[0] if len(args) == 1 else None
        if isinstance(source, TablesDenotation):
            self._utterance_idx = source._utterance_idx
        else:
            self._utterance_idx = 0
    @property
    def utterance_idx(self):
        """int: index of the utterance currently being executed."""
        return self._utterance_idx
    def increment_utterance_idx(self):
        """Advance to the next utterance (called on an end-of-utterance token)."""
        self._utterance_idx += 1
################################
# Executor
class TablesPostfixExecutor(Executor):
"""Stack-based executor for the tables domain.
Executes a postfix-encoded logical form on the table knowledge graph.
"""
CACHE_LIMIT = 20000
    def __init__(self, graph, debug=False, forbid_partial_empty=True):
        """Construct a new executor.

        Args:
            graph (TablesKnowledgeGraph): graph to be executed on.
            debug (bool): whether to be verbose.
            forbid_partial_empty (bool): throw an error if any step produces
                an empty denotation. (True by default)
        """
        assert isinstance(graph, TablesKnowledgeGraph), \
            'Argument graph must be a TablesKnowledgeGraph; got {}'.format(type(graph))
        self.graph = graph
        self.debug = debug
        self.forbid_partial_empty = forbid_partial_empty
        # (str(old_denotation), predicate) -> TablesDenotation or Exception;
        # size is capped at CACHE_LIMIT by execute_predicate.
        self.cache = {}
    def execute(self, y_toks, old_denotation=None):
        """Return the denotation of the formula.

        Args:
            y_toks (list[Predicate]): the formula
            old_denotation (TablesDenotation)

        Returns:
            TablesDenotation
            The denotation is not finalized.

        Throws:
            Exception if the formula is malformed.
        """
        if self.debug:
            print('Executing: {} (old deno: {})'.format(y_toks, old_denotation))
        if old_denotation:
            stack = TablesDenotation(old_denotation)  # copy; leave caller's stack intact
            assert stack.utterance_idx == old_denotation.utterance_idx
        else:
            stack = TablesDenotation()
            assert stack.utterance_idx == 0
        # Postfix evaluation: each predicate mutates the stack in place;
        # an end-of-utterance token only advances the utterance index.
        for predicate in y_toks:
            if predicate.name == EOU:
                stack.increment_utterance_idx()
            else:
                self.apply(predicate.name, stack)
            if self.debug:
                print(predicate, stack)
        return stack
    def execute_predicate(self, predicate, old_denotation=None):
        """Return the new denotation of the lf when the predicate is added.

        Args:
            predicate (Predicate)
            old_denotation (TablesDenotation)

        Returns:
            denotation (TablesDenotation)
        """
        # End-of-utterance bypasses the cache: it only bumps the index.
        if predicate.name == EOU:
            if old_denotation is None:
                denotation = TablesDenotation()
            else:
                denotation = TablesDenotation(old_denotation)
            denotation.increment_utterance_idx()
            return denotation
        # Cache keyed on (stringified old denotation, predicate); failed
        # applications are cached as Exception instances as well.
        signature = (str(old_denotation), predicate)
        if signature in self.cache:
            denotation = self.cache[signature]
        else:
            try:
                stack = (TablesDenotation(old_denotation)
                         if old_denotation else TablesDenotation())
                self.apply(predicate.name, stack)
                denotation = stack
            except Exception as e:
                denotation = e
            if len(self.cache) < TablesPostfixExecutor.CACHE_LIMIT:
                self.cache[signature] = denotation
        if isinstance(denotation, TablesDenotation):
            # The cache key ignores utterance_idx, so a cached entry may carry
            # a different index; restore the caller's index on a copy.
            old_utterance_idx = (old_denotation.utterance_idx
                                 if old_denotation is not None else 0)
            if denotation.utterance_idx != old_utterance_idx:
                denotation = TablesDenotation(denotation)  # Make a copy
                denotation._utterance_idx = old_utterance_idx
        return denotation
    # Shared exception instance raised for denotations that cannot be finalized.
    INVALID_FINAL_DENOTATION = ValueError('Invalid final denotation')
    def finalize(self, denotation):
        """Return the finalized denotation as list[Value].

        A valid final denotation is a stack holding exactly one non-empty set
        of strings (graph entity ids), floats, or Dates.

        Raises:
            ValueError: if the denotation cannot be finalized.
        """
        if (len(denotation) != 1
                or not isinstance(denotation[0], set)
                or not denotation[0]):
            raise TablesPostfixExecutor.INVALID_FINAL_DENOTATION
        values = []
        for item in denotation[0]:
            if isinstance(item, str):
                # Graph entity ids map back to their original surface string.
                if not self.graph.has_id(item):
                    raise TablesPostfixExecutor.INVALID_FINAL_DENOTATION
                values.append(StringValue(self.graph.original_string(item)))
            elif isinstance(item, float):
                values.append(NumberValue(item))
            elif isinstance(item, Date):
                values.append(DateValue(item.year, item.month, item.day))
            else:
                # This should not happen.
                assert False, "Unknown item type: {}".format(item)
        return values
    ################################
    # Internal methods
    def apply(self, predicate, stack):
        """Apply the predicate to the stack. The stack is modified in-place.

        Args:
            predicate (basestring): The next predicate to apply.
            stack (TablesDenotation): The current execution stack

        Raises:
            ValueError: if the predicate is unknown, or if the resulting
                partial denotation is empty while forbid_partial_empty is set.
        """
        # Predefined operations
        if predicate in AGGREGATES:
            arg = stack.pop()
            stack.append(self.apply_aggregate(predicate, arg))
        elif predicate in MERGES:
            # Postfix order: the second operand is on top of the stack.
            arg2 = stack.pop()
            arg1 = stack.pop()
            stack.append(self.apply_merge_arith(predicate, arg1, arg2))
        elif predicate in BEGIN_GROWS:
            arg = stack.pop()
            stack.append(self.apply_begin_grow(predicate, arg))
        elif predicate in END_GROWS:
            arg = stack.pop()
            stack.append(self.apply_end_grow(predicate, arg))
        # Assert
        elif predicate.startswith(ASSERT_PREFIX):
            unary = predicate[len(ASSERT_PREFIX):]
            assert is_unary(unary)
            # Checks the top of the stack without popping it.
            self.apply_assert(unary, stack[-1])
        # Unary or Binary
        elif predicate == TYPE_ROW:
            stack.append(self.apply_type_row(predicate))
        elif is_unary(predicate):
            stack.append(self.apply_unary(predicate))
        elif is_binary(predicate):
            arg = stack.pop()
            stack.append(self.apply_join_fast(predicate, arg))
        else:
            raise ValueError('Unknown predicate {}'.format(predicate))
        # Optional: Check if the partial denotation is empty.
        # (Empty means: an empty set, or a dict whose values are all empty.)
        if self.forbid_partial_empty:
            if (not stack[-1] or (isinstance(stack[-1], dict) and
                    all(not x for x in stack[-1].values()))):
                raise self.EMPTY_EXCEPTION
    # Shared exception instance raised whenever a partial denotation is empty.
    EMPTY_EXCEPTION = ValueError('Denotation is empty!')
################################
# Operators
def apply_unary(self, predicate):
unary = parse_unary(predicate)
if (isinstance(unary, Date) and
(unary.year == -1 or unary.month == -1 or unary.day == -1)):
return GenericDateInfiniteSet(unary)
else:
return {unary}
    def apply_type_row(self, predicate):
        """Return the graph's full row set (the denotation of 'type-row')."""
        return self.graph.all_rows
    @handle_dict_1arg
    def apply_join(self, predicate, arg):
        """Join a binary predicate with a popped operand.

        NOTE(review): `apply` dispatches to apply_join_fast; this decorated
        version appears to be the general/reference path -- confirm callers.

        Args:
            predicate (str): a special binary ('!=', '<', '<=', '>', '>=')
                or a (possibly reversed) binary relation name.
            arg (set | InfiniteSet): the operand; dict operands are handled
                per-value by the handle_dict_1arg decorator.
        """
        assert isinstance(predicate, str), str(predicate)
        assert isinstance(arg, (set, InfiniteSet)), str(arg)
        if predicate in SPECIAL_BINARIES:
            if predicate == '!=':
                assert len(arg) == 1, '{} takes exactly 1 object; got {}'.format(predicate, arg)
                thing = next(iter(arg))
                return NeqInfiniteSet(thing)
            elif predicate in ('<', '<=', '>', '>='):
                # A wildcard date range collapses to the boundary relevant
                # for the comparison direction before the length check.
                if isinstance(arg, GenericDateInfiniteSet):
                    arg = [arg.min_()] if predicate in ('<', '>=') else [arg.max_()]
                assert len(arg) == 1, '{} takes exactly 1 object; got {}'.format(predicate, arg)
                thing = next(iter(arg))
                return RangeInfiniteSet(predicate, thing)
            else:
                raise NotImplementedError(predicate)
        elif is_binary_name(predicate):
            return self.graph.join(predicate, arg)
        elif is_reversed_binary_name(predicate):
            # '!relation' joins in the reverse direction (strip the '!').
            return self.graph.reversed_join(predicate[1:], arg)
        else:
            raise NotImplementedError(predicate)
    # Shared exception instance raised by apply_join_fast on malformed operands.
    JOIN_EXCEPTION = ValueError('Join Exception!')
def apply_join_fast(self, predicate, arg):
    """Optimized join dispatcher used by `apply`.

    Unlike apply_join, this handles dict arguments inline (instead of via
    the handle_dict_1arg decorator) and raises the shared JOIN_EXCEPTION
    instead of failing an assertion when an argument has the wrong size.
    """
    if predicate == '!=':
        if isinstance(arg, dict):
            # Map each key's singleton value set through '!='.
            answer = {}
            for key, thing in arg.items():
                if len(thing) != 1:
                    raise self.JOIN_EXCEPTION
                answer[key] = NeqInfiniteSet(next(iter(thing)))
            return answer
        elif len(arg) != 1:
            raise self.JOIN_EXCEPTION
        return NeqInfiniteSet(next(iter(arg)))
    elif predicate in ('<', '<=', '>', '>='):
        if isinstance(arg, dict):
            answer = {}
            for key, thing in arg.items():
                # A generic date collapses to the boundary value relevant
                # for the comparison direction.
                if isinstance(thing, GenericDateInfiniteSet):
                    thing = [thing.min_()] if predicate in ('<', '>=') else [thing.max_()]
                if len(thing) != 1:
                    raise self.JOIN_EXCEPTION
                answer[key] = RangeInfiniteSet(predicate, next(iter(thing)))
            return answer
        else:
            if isinstance(arg, GenericDateInfiniteSet):
                arg = [arg.min_()] if predicate in ('<', '>=') else [arg.max_()]
            if len(arg) != 1:
                raise self.JOIN_EXCEPTION
            return RangeInfiniteSet(predicate, next(iter(arg)))
    elif predicate[0] == '!':
        # Reversed binary: strip '!' and join in the reverse direction.
        relation = predicate[1:]
        if isinstance(arg, dict):
            return {key: self.graph.reversed_join(relation, things)
                    for (key, things) in arg.items()}
        return self.graph.reversed_join(relation, arg)
    else:
        # Plain binary relation name.
        if isinstance(arg, dict):
            return {key: self.graph.join(predicate, things)
                    for (key, things) in arg.items()}
        return self.graph.join(predicate, arg)
def apply_assert(self, unary, stack_top):
    """Verify that `stack_top` is a singleton set whose sole member equals
    the value denoted by `unary`; raises AssertionError otherwise."""
    assert isinstance(stack_top, set), 'Stack top {} is not a set'.format(stack_top)
    assert len(stack_top) == 1, 'Stack top {} has size more than 1'.format(stack_top)
    # Tuple-unpack the singleton (size already checked above).
    (sole_member,) = stack_top
    assert parse_unary(unary) == sole_member
@handle_dict_1arg
def apply_aggregate(self, predicate, arg):
    """Aggregate a finite unary `arg`.

    count works on any set; max/min require a homogeneous set of numbers
    or dates; sum/avg require numbers only. Returns a singleton set.
    """
    if predicate == 'count':
        return {float(len(arg))}
    # Raises if arg mixes types or is not numbers ('N') / dates ('D').
    agreed_type = ensure_same_type(arg, ['N', 'D'])
    if predicate in ('max', 'min'):
        extremum = max(arg) if predicate == 'max' else min(arg)
        return {extremum}
    assert agreed_type == 'N', 'Cannot do {} over non-numbers'.format(predicate)
    if predicate in ('sum', 'avg'):
        total = sum(arg)
        return {total} if predicate == 'sum' else {total / len(arg)}
    raise NotImplementedError(predicate)
@handle_dict_2args
def apply_merge_arith(self, predicate, arg1, arg2):
    """Merge two unaries: set intersection ('and'), union ('or'), or
    absolute difference ('diff', where one argument must be a singleton)."""
    if predicate == 'and':
        return arg1 & arg2
    if predicate == 'or':
        return arg1 | arg2
    if predicate == 'diff':
        assert isinstance(arg1, set) and isinstance(arg2, set)
        assert len(arg1) == 1 or len(arg2) == 1, 'One of diff arguments must have size 1'
        # Subtract the singleton's value from every element of the other set.
        if len(arg1) == 1:
            pivot, other = next(iter(arg1)), arg2
        else:
            pivot, other = next(iter(arg2)), arg1
        return {abs(x - pivot) for x in other}
    raise NotImplementedError(predicate)
def apply_begin_grow(self, predicate, arg):
    """Begin a 'grow' scope: map each element of the finite unary `arg`
    to a singleton set containing itself.

    Args:
        predicate: the begin-grow predicate token (unused here).
        arg (set): a finite unary.
    Returns:
        dict: {x: {x}} for each x in arg.
    Raises:
        AssertionError: if `arg` is not a finite set.
    """
    assert isinstance(arg, set), \
        'begin_grow only operates on a finite unary; got {}'.format(arg)
    # Dict comprehension: idiomatic form of dict((x, {x}) for x in arg).
    return {x: {x} for x in arg}
def apply_end_grow(self, predicate, arg):
    """End a 'grow' scope with argmin/argmax: return the set of keys whose
    value sets contain the smallest (argmin) or largest (argmax) value."""
    assert isinstance(arg, dict), \
        'end_grow only operates on a dict; got {}'.format(arg)
    # Validation only: raises if the values mix types or are not
    # numbers/dates.
    ensure_same_type(arg, ['N', 'D'])
    best_keys, best_value = set(), None
    for key, values in arg.items():
        for value in values:
            is_better = (best_value is None
                    or (predicate == 'argmin' and value < best_value)
                    or (predicate == 'argmax' and value > best_value))
            if is_better:
                best_value = value
                best_keys = {key}
            elif value == best_value:
                # Ties share the win.
                best_keys.add(key)
    return best_keys
################################
# For profiling

def add_decorated_methods(profiler):
    """Register every decorated TablesPostfixExecutor method — and the
    undecorated function it wraps (its `original_fn`) — with `profiler`."""
    for name, member in list(TablesPostfixExecutor.__dict__.items()):
        if not hasattr(member, 'original_fn'):
            continue
        print('Adding function {} to profiler'.format(name))
        profiler.add_function(member)
        profiler.add_function(member.original_fn)
|
ContextualSP/lemon/executor/strongsup/tables/executor.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/tables/executor.py",
"repo_id": "ContextualSP",
"token_count": 7863
}
| 232 |
import pytest
from strongsup.tables.structure import (
parse_number, parse_date, parse_value, Date,
get_type, ensure_same_type,
NeqInfiniteSet, RangeInfiniteSet, GenericDateInfiniteSet,
)
class TestValues(object):
    """Tests for value parsing and typing (numbers, dates, entity strings)
    in strongsup.tables.structure."""

    def test_date(self):
        # Dates compare and hash by (year, month, day); -1 is a wildcard.
        assert Date(2012, 12, -1) == Date(2012, 12, -1)
        assert len({Date(-1, 4, 14), Date(-1, 4, 14)}) == 1
        # Invalid constructions: all-wildcard date, month/day out of range.
        with pytest.raises(Exception):
            Date(-1, -1, -1)
        with pytest.raises(Exception):
            Date(1990, 0, 12)
        with pytest.raises(Exception):
            Date(1990, 4, 32)
        assert Date(2012, 8, -1) < Date(2012, 12, 4)
        # Not sure if this is the behavior we want ...
        assert Date(-1, 8, 24) < Date(2012, 8, 29)
        with pytest.raises(Exception):
            # Cannot compare across types
            Date(1984, -1, -1) > 1985.0

    def test_parse_value(self):
        # parse_number: strict float parsing; trailing junk is rejected.
        assert parse_number('2.3') == 2.3
        assert parse_number('-4') == -4
        with pytest.raises(Exception):
            parse_number('3.45m')
        # parse_date: 'xx' / 'XXXX' fields become the -1 wildcard, but at
        # least one field must be concrete.
        assert parse_date('1961-08-04') == Date(1961, 8, 4)
        assert parse_date('XXXX-12-xx') == Date(-1, 12, -1)
        with pytest.raises(Exception):
            parse_date('xx-xx-xx')
        # parse_value: number or date when possible, otherwise the
        # original string is returned unchanged.
        assert parse_value('10') == 10.0
        assert parse_value('-3.14') == -3.14
        assert parse_value('xx-8-24') == Date(-1, 8 ,24)
        assert parse_value('40 kg') == '40 kg'
        assert parse_value('xx-xx-xx') == 'xx-xx-xx'

    def test_get_type(self):
        # 'N' for numbers, 'D' for dates, namespace prefix ('fb:cell') for
        # entity names; non-unaries (operators, binaries) are rejected.
        assert get_type(4.0) == 'N'
        assert get_type(Date(-1, -1, 2)) == 'D'
        assert get_type('fb:cell.puppy') == 'fb:cell'
        with pytest.raises(Exception):
            get_type('argmax')
        with pytest.raises(Exception):
            get_type('fb:row.row.name')

    def test_ensure_same_type(self):
        # Accepts a set, or a dict whose VALUES are sets (keys are not
        # type-checked); returns the shared type, optionally checked
        # against one or more allowed types.
        assert ensure_same_type({4.0}) == 'N'
        assert ensure_same_type({'fb:cell.puppy': {4.0}, 'fb:cell.kitten': {6.0, 7.0}}) == 'N'
        assert ensure_same_type({Date(2010, 1, 2): {4.0}, 'fb:cell.kitten': {6.0, 7.0}}) == 'N'
        assert ensure_same_type({4.0, 5.0, 20.0, -2.5}) == 'N'
        assert ensure_same_type({4.0, 5.0, 20.0, -2.5}, 'N') == 'N'
        assert ensure_same_type({4.0, 5.0, 20.0, -2.5}, ['D', 'N']) == 'N'
        assert ensure_same_type({Date(-1, 11, 14), Date(-1, 12, 3)}) == 'D'
        assert ensure_same_type({'fb:cell.puppy', 'fb:cell.kitten'}) == 'fb:cell'
        assert ensure_same_type({'fb:cell.puppy', 'fb:cell.kitten'}, 'fb:cell') == 'fb:cell'
        assert ensure_same_type({x: {(x*1.)**y for y in range(x)} for x in [2, 3, 5, 7]}) == 'N'
        assert ensure_same_type({x: {'fb:hello.' + str(y) for y in range(x)} for x in [2, 3, 5, 7]}) == 'fb:hello'
        # Error cases: non-set input, empty input, empty value sets,
        # mixed types, non-unary names, and disallowed types.
        with pytest.raises(ValueError):
            ensure_same_type('4.0')
        with pytest.raises(ValueError):
            ensure_same_type(set())
        with pytest.raises(ValueError):
            ensure_same_type(set(), 'N')
        with pytest.raises(ValueError):
            ensure_same_type({4.0: set(), 5.0: set()}, 'D')
        with pytest.raises(ValueError):
            ensure_same_type({4.0: {5.0}, 6.0: {2.0, 'fb:cell.kitten'}})
        with pytest.raises(ValueError):
            ensure_same_type({'fb:row.row.name'})
        with pytest.raises(ValueError):
            ensure_same_type({2.25, 4.6, -5}, 'D')
        with pytest.raises(ValueError):
            ensure_same_type({'fb:part.puppy': {1.2}, 'fb:cell.kitten': {2.4}}, ['D', 'fb:cell'])
class TestInfiniteSet(object):
    """Tests for the lazy set implementations: NeqInfiniteSet,
    RangeInfiniteSet, and GenericDateInfiniteSet."""

    def test_neq(self):
        # A NeqInfiniteSet contains every value of the same type except the
        # excluded one; values of other types are never members.
        neq = NeqInfiniteSet(3.0)
        assert 3.0 not in neq
        assert 6.0 in neq
        assert Date(2010, 1, 2) not in neq
        assert 'fb:cell.puppy' not in neq
        neq = NeqInfiniteSet(Date(2010, 1, 2))
        assert 3.0 not in neq
        assert Date(2010, 1, 2) not in neq
        assert Date(2010, -1, 2) in neq
        assert 'fb:cell.puppy' not in neq
        neq = NeqInfiniteSet('fb:cell.puppy')
        assert 'fb:cell.puppy' not in neq
        assert 'fb:cell.kitten' in neq
        assert 'fb:part.robot' not in neq

    def test_neq_and(self):
        # Intersection with a finite set filters it, from either side.
        assert NeqInfiniteSet(3.0) & {3.0, 4.0, Date(2010, 1, 2)} == {4.0}
        assert {3.0, 4.0, Date(2010, 1, 2)} & NeqInfiniteSet(3.0) == {4.0}
        assert NeqInfiniteSet(Date(2010, -1, 2)) & \
                {3.0, 4.0, Date(2010, 1, 2), Date(2010, -1, 2), Date(2010, -1, -1)} == \
                {Date(2010, 1, 2), Date(2010, -1, -1)}

    def test_basic_range(self):
        # One comparison: an open or half-open numeric interval.
        rng = RangeInfiniteSet('>', 4.0)
        assert 2.0 not in rng
        assert 4.0 not in rng
        assert 8.0 in rng
        assert Date(2010, -1, -1) not in rng
        rng = RangeInfiniteSet('>=', 4.0)
        assert 2.0 not in rng
        assert 4.0 in rng
        assert 8.0 in rng
        rng = RangeInfiniteSet('<', 4.0)
        assert 2.0 in rng
        assert 4.0 not in rng
        assert 8.0 not in rng
        rng = RangeInfiniteSet('<=', 4.0)
        assert 2.0 in rng
        assert 4.0 in rng
        assert 8.0 not in rng
        # Two comparisons: a bounded interval.
        rng = RangeInfiniteSet('>', 4.0, '<=', 8.0)
        assert 2.0 not in rng
        assert 4.0 not in rng
        assert 6.0 in rng
        assert 8.0 in rng
        assert 10.0 not in rng
        assert 'fb:cell.obama' not in rng

    def test_date_range(self):
        rng = RangeInfiniteSet('>', Date(2010, 2, 14), '<=', Date(2011, 12, 1))
        assert Date(2010, 2, 13) not in rng
        assert Date(2010, 2, 14) not in rng
        assert Date(2010, 2, 15) in rng
        assert Date(2010, 3, 1) in rng
        assert Date(2011, 2, 1) in rng
        assert Date(2011, 12, 1) in rng
        assert Date(2012, 5, 7) not in rng

    def test_range_and(self):
        # Intersecting two ranges gives the overlap.
        loose = RangeInfiniteSet('<', 4.0)
        tight = RangeInfiniteSet('<', 1.0)
        meet = loose & tight
        assert 0.0 in meet
        assert 1.0 not in meet
        assert 4.0 not in meet
        # Intersection with a finite set filters it, from either side.
        assert loose & {0.0, 1.0, 4.0, 'fb:cell.puppy'} == {0.0, 1.0}
        assert {0.0, 1.0, 4.0, 'fb:cell.puppy'} & loose == {0.0, 1.0}
        left = RangeInfiniteSet('>=', 4.0, '<', 10.0)
        right = RangeInfiniteSet('<', 7.0, '>=', 2.0)
        meet = left & right
        assert 2.0 not in meet
        assert 4.0 in meet
        assert 6.0 in meet
        assert 7.0 not in meet
        assert 10.0 not in meet
        # Touching or disjoint ranges collapse to a finite set.
        assert RangeInfiniteSet('>', 4.0) & RangeInfiniteSet('<', 4.0) == set()
        assert RangeInfiniteSet('>=', 4.0) & RangeInfiniteSet('<=', 4.0) == {4.0}
        assert RangeInfiniteSet('>=', 4.0) & RangeInfiniteSet('<', 4.0) == set()
        assert RangeInfiniteSet('>', 4.0, '<', 8.0) & RangeInfiniteSet('<', 4.0) == set()
        assert RangeInfiniteSet('>=', 4.0, '<=', 8.0) & RangeInfiniteSet('<=', 4.0) == {4.0}

    def test_generic_date(self):
        # Wildcard day: spans the whole month.
        gds = GenericDateInfiniteSet(Date(2010, 4, -1))
        assert Date(2010, 4, 2) in gds
        assert Date(2010, 5, 3) not in gds
        assert Date(2010, -1, -1) not in gds
        assert 4.0 not in gds
        assert gds.min_() == Date(2010, 4, 1)
        assert gds.max_() == Date(2010, 4, 30)
        # Wildcard year: that month-day in any year.
        gds = GenericDateInfiniteSet(Date(-1, 4, 20))
        assert Date(2010, 4, 20) in gds
        assert Date(2010, 5, 20) not in gds
        assert Date(-1, 4, -1) not in gds
        assert 4.0 not in gds
        assert gds.min_() == gds.max_() == Date(-1, 4, 20)

    def test_generic_date_and(self):
        gds = GenericDateInfiniteSet(Date(-1, 4, -1))
        expected = {Date(2010, 4, 2), Date(2011, 4, 7)}
        assert gds & {Date(2010, 4, 2), Date(2010, 5, 3), Date(2011, 4, 7)} == expected
        assert {Date(2010, 4, 2), Date(2010, 5, 3), Date(2011, 4, 7)} & gds == expected
|
ContextualSP/lemon/executor/strongsup/tests/tables/test_structure.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/tests/tables/test_structure.py",
"repo_id": "ContextualSP",
"token_count": 4012
}
| 233 |
{"id":"Mercury_7175875","answerKey":"C"}
{"id":"Mercury_SC_409171","answerKey":"B"}
{"id":"Mercury_SC_408547","answerKey":"C"}
{"id":"Mercury_407327","answerKey":"D"}
{"id":"MCAS_2006_9_44","answerKey":"D"}
{"id":"Mercury_7270393","answerKey":"B"}
{"id":"MCAS_2014_5_7","answerKey":"C"}
{"id":"Mercury_7086660","answerKey":"C"}
{"id":"Mercury_7168805","answerKey":"B"}
{"id":"MCAS_2003_8_11","answerKey":"A"}
{"id":"Mercury_7250058","answerKey":"B"}
{"id":"Mercury_7012740","answerKey":"A"}
{"id":"Mercury_LBS10610","answerKey":"C"}
{"id":"Mercury_SC_407400","answerKey":"C"}
{"id":"Mercury_7212993","answerKey":"C"}
{"id":"Mercury_SC_413240","answerKey":"A"}
{"id":"Mercury_7186358","answerKey":"C"}
{"id":"Mercury_7166425","answerKey":"B"}
{"id":"MDSA_2007_8_3","answerKey":"A"}
{"id":"Mercury_7094290","answerKey":"C"}
{"id":"Mercury_7186568","answerKey":"B"}
{"id":"Mercury_402216","answerKey":"B"}
{"id":"Mercury_404894","answerKey":"A"}
{"id":"MCAS_2002_8_11","answerKey":"C"}
{"id":"Mercury_SC_405086","answerKey":"B"}
{"id":"Mercury_SC_408324","answerKey":"D"}
{"id":"Mercury_7218820","answerKey":"B"}
{"id":"Mercury_412202","answerKey":"B"}
{"id":"Mercury_SC_409139","answerKey":"C"}
{"id":"Mercury_400687","answerKey":"B"}
{"id":"Mercury_7171605","answerKey":"D"}
{"id":"Mercury_7210245","answerKey":"C"}
{"id":"AKDE&ED_2008_4_25","answerKey":"A"}
{"id":"AKDE&ED_2008_4_19","answerKey":"C"}
{"id":"Mercury_SC_400402","answerKey":"A"}
{"id":"Mercury_7234308","answerKey":"A"}
{"id":"ACTAAP_2014_5_8","answerKey":"B"}
{"id":"Mercury_400407","answerKey":"B"}
{"id":"Mercury_7116288","answerKey":"C"}
{"id":"MCAS_2004_9_15-v1","answerKey":"B"}
{"id":"NYSEDREGENTS_2015_4_26","answerKey":"C"}
{"id":"Mercury_SC_401620","answerKey":"A"}
{"id":"Mercury_400877","answerKey":"C"}
{"id":"Mercury_7174213","answerKey":"D"}
{"id":"NYSEDREGENTS_2008_8_34","answerKey":"2"}
{"id":"Mercury_7212398","answerKey":"B"}
{"id":"Mercury_SC_401290","answerKey":"C"}
{"id":"Mercury_SC_402120","answerKey":"C"}
{"id":"Mercury_184975","answerKey":"C"}
{"id":"Mercury_SC_400578","answerKey":"A"}
{"id":"MCAS_2001_8_4","answerKey":"D"}
{"id":"MCAS_2003_5_33","answerKey":"C"}
{"id":"Mercury_7068513","answerKey":"A"}
{"id":"AKDE&ED_2008_4_26","answerKey":"C"}
{"id":"Mercury_7235638","answerKey":"C"}
{"id":"MDSA_2009_5_20","answerKey":"B"}
{"id":"Mercury_178325","answerKey":"C"}
{"id":"Mercury_7212678","answerKey":"A"}
{"id":"TAKS_2009_8_32","answerKey":"A"}
{"id":"Mercury_412681","answerKey":"C"}
{"id":"Mercury_400440","answerKey":"D"}
{"id":"Mercury_SC_416529","answerKey":"B"}
{"id":"MCAS_2006_8_1","answerKey":"D"}
{"id":"TIMSS_2003_8_pg80","answerKey":"C"}
{"id":"Mercury_416645","answerKey":"D"}
{"id":"Mercury_406777","answerKey":"A"}
{"id":"Mercury_LBS11018","answerKey":"A"}
{"id":"Mercury_7139878","answerKey":"C"}
{"id":"Mercury_417147","answerKey":"B"}
{"id":"Mercury_7016765","answerKey":"A"}
{"id":"Mercury_415303","answerKey":"A"}
{"id":"Mercury_7215845","answerKey":"C"}
{"id":"Mercury_7136885","answerKey":"B"}
{"id":"Mercury_SC_400059","answerKey":"D"}
{"id":"Mercury_7044328","answerKey":"D"}
{"id":"MEA_2010_8_1","answerKey":"A"}
{"id":"Mercury_414099","answerKey":"B"}
{"id":"Mercury_410807","answerKey":"B"}
{"id":"Mercury_403234","answerKey":"B"}
{"id":"Mercury_7011323","answerKey":"A"}
{"id":"Mercury_7109463","answerKey":"D"}
{"id":"Mercury_SC_401277","answerKey":"C"}
{"id":"MCAS_2005_5_25","answerKey":"C"}
{"id":"Mercury_SC_401272","answerKey":"D"}
{"id":"Mercury_7103600","answerKey":"D"}
{"id":"MDSA_2009_8_2","answerKey":"A"}
{"id":"Mercury_7127943","answerKey":"A"}
{"id":"ACTAAP_2009_7_8","answerKey":"D"}
{"id":"MCAS_2006_9_43","answerKey":"C"}
{"id":"Mercury_7252088","answerKey":"C"}
{"id":"Mercury_7084665","answerKey":"A"}
{"id":"FCAT_2008_5_2","answerKey":"D"}
{"id":"Mercury_SC_414041","answerKey":"D"}
{"id":"MCAS_2014_8_20","answerKey":"A"}
{"id":"Mercury_SC_401116","answerKey":"B"}
{"id":"Mercury_7064680","answerKey":"C"}
{"id":"Mercury_7211680","answerKey":"C"}
{"id":"Mercury_180373","answerKey":"B"}
{"id":"Mercury_7216248","answerKey":"B"}
{"id":"Mercury_SC_417677","answerKey":"B"}
{"id":"Mercury_7221655","answerKey":"C"}
{"id":"MCAS_2006_9_12","answerKey":"D"}
{"id":"MCAS_2004_9_2","answerKey":"A"}
{"id":"Mercury_180005","answerKey":"B"}
{"id":"Mercury_7071523","answerKey":"C"}
{"id":"Mercury_7263375","answerKey":"B"}
{"id":"TIMSS_2011_8_pg102","answerKey":"D"}
{"id":"Mercury_406550","answerKey":"B"}
{"id":"Mercury_SC_400057","answerKey":"D"}
{"id":"TAKS_2009_5_26","answerKey":"A"}
{"id":"LEAP_2007_8_10417","answerKey":"A"}
{"id":"Mercury_7027405","answerKey":"A"}
{"id":"Mercury_7058415","answerKey":"A"}
{"id":"Mercury_7215828","answerKey":"A"}
{"id":"Mercury_7064575","answerKey":"B"}
{"id":"Mercury_7097493","answerKey":"D"}
{"id":"AKDE&ED_2008_8_47","answerKey":"B"}
{"id":"Mercury_405136","answerKey":"C"}
{"id":"Mercury_415086","answerKey":"D"}
{"id":"Mercury_7228725","answerKey":"C"}
{"id":"Mercury_7201740","answerKey":"A"}
{"id":"NYSEDREGENTS_2010_4_4","answerKey":"A"}
{"id":"MEAP_2005_8_21","answerKey":"D"}
{"id":"Mercury_7026355","answerKey":"A"}
{"id":"Mercury_7249708","answerKey":"B"}
{"id":"Mercury_7107170","answerKey":"D"}
{"id":"Mercury_183820","answerKey":"A"}
{"id":"Mercury_SC_401357","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_8_11","answerKey":"4"}
{"id":"Mercury_416650","answerKey":"C"}
{"id":"NCEOGA_2013_5_20","answerKey":"B"}
{"id":"Mercury_400500","answerKey":"C"}
{"id":"Mercury_SC_401366","answerKey":"D"}
{"id":"Mercury_7141610","answerKey":"B"}
{"id":"Mercury_7247013","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_8_30","answerKey":"2"}
{"id":"ACTAAP_2011_5_16","answerKey":"B"}
{"id":"Mercury_7093153","answerKey":"B"}
{"id":"Mercury_7013965","answerKey":"A"}
{"id":"Mercury_7034843","answerKey":"B"}
{"id":"Mercury_SC_407610","answerKey":"D"}
{"id":"Mercury_405947","answerKey":"C"}
{"id":"AKDE&ED_2012_8_6","answerKey":"A"}
{"id":"Mercury_7011130","answerKey":"A"}
{"id":"Mercury_LBS11022","answerKey":"A"}
{"id":"TIMSS_1995_8_J1","answerKey":"C"}
{"id":"Mercury_SC_408366","answerKey":"B"}
{"id":"Mercury_7009993","answerKey":"A"}
{"id":"Mercury_401699","answerKey":"C"}
{"id":"Mercury_7056858","answerKey":"A"}
{"id":"Mercury_7027160","answerKey":"C"}
{"id":"Mercury_400811","answerKey":"D"}
{"id":"Mercury_SC_400062","answerKey":"D"}
{"id":"Mercury_400699","answerKey":"B"}
{"id":"Mercury_7029803","answerKey":"B"}
{"id":"Mercury_SC_401372","answerKey":"D"}
{"id":"Mercury_7271128","answerKey":"C"}
{"id":"Mercury_407260","answerKey":"B"}
{"id":"Mercury_SC_416155","answerKey":"C"}
{"id":"Mercury_402145","answerKey":"B"}
{"id":"AIMS_2009_4_5","answerKey":"A"}
{"id":"TIMSS_2003_4_pg7","answerKey":"D"}
{"id":"Mercury_7142415","answerKey":"A"}
{"id":"Mercury_7212818","answerKey":"B"}
{"id":"Mercury_SC_413299","answerKey":"A"}
{"id":"Mercury_7132020","answerKey":"A"}
{"id":"MEA_2014_8_10","answerKey":"C"}
{"id":"TIMSS_1995_8_N2","answerKey":"B"}
{"id":"Mercury_7024465","answerKey":"A"}
{"id":"Mercury_SC_415762","answerKey":"D"}
{"id":"Mercury_415093","answerKey":"D"}
{"id":"LEAP_2005_8_10404","answerKey":"D"}
{"id":"AIMS_2008_8_6","answerKey":"B"}
{"id":"Mercury_7057173","answerKey":"D"}
{"id":"TIMSS_2007_8_pg60","answerKey":"C"}
{"id":"AIMS_2009_8_14","answerKey":"A"}
{"id":"Mercury_185010","answerKey":"A"}
{"id":"Mercury_7206938","answerKey":"A"}
{"id":"Mercury_402501","answerKey":"B"}
{"id":"MCAS_2011_8_15365","answerKey":"C"}
{"id":"Mercury_SC_401766","answerKey":"A"}
{"id":"Mercury_7162400","answerKey":"D"}
{"id":"Mercury_7086695","answerKey":"C"}
{"id":"Mercury_SC_402994","answerKey":"C"}
{"id":"Mercury_7056298","answerKey":"A"}
{"id":"Mercury_409115","answerKey":"C"}
{"id":"Mercury_409647","answerKey":"D"}
{"id":"Mercury_414352","answerKey":"A"}
{"id":"Mercury_185325","answerKey":"C"}
{"id":"Mercury_SC_412374","answerKey":"C"}
{"id":"Mercury_SC_401818","answerKey":"C"}
{"id":"Mercury_SC_413549","answerKey":"B"}
{"id":"Mercury_7093958","answerKey":"A"}
{"id":"Mercury_7102323","answerKey":"D"}
{"id":"Mercury_7222793","answerKey":"B"}
{"id":"Mercury_SC_400701","answerKey":"A"}
{"id":"Mercury_409301","answerKey":"B"}
{"id":"Mercury_SC_400383","answerKey":"C"}
{"id":"CSZ_2005_5_CSZ10021","answerKey":"B"}
{"id":"Mercury_SC_407070","answerKey":"B"}
{"id":"Mercury_SC_400708","answerKey":"C"}
{"id":"Mercury_7075040","answerKey":"C"}
{"id":"Mercury_7137165","answerKey":"A"}
{"id":"Mercury_SC_400046","answerKey":"B"}
{"id":"Mercury_7099330","answerKey":"D"}
{"id":"MDSA_2007_5_2","answerKey":"C"}
{"id":"Mercury_7271758","answerKey":"B"}
{"id":"MCAS_2003_8_31","answerKey":"C"}
{"id":"AKDE&ED_2008_8_53","answerKey":"C"}
{"id":"TIMSS_2007_8_pg109","answerKey":"C"}
{"id":"Mercury_175385","answerKey":"C"}
{"id":"Mercury_410669","answerKey":"D"}
{"id":"MEAP_2005_8_39","answerKey":"C"}
{"id":"Mercury_SC_408568","answerKey":"B"}
{"id":"AKDE&ED_2008_8_7","answerKey":"B"}
{"id":"Mercury_7082845","answerKey":"B"}
{"id":"Mercury_SC_405726","answerKey":"D"}
{"id":"Mercury_SC_415407","answerKey":"A"}
{"id":"Mercury_SC_401792","answerKey":"C"}
{"id":"LEAP_2000_8_4","answerKey":"B"}
{"id":"Mercury_SC_413439","answerKey":"A"}
{"id":"ACTAAP_2014_7_13","answerKey":"B"}
{"id":"Mercury_SC_402638","answerKey":"A"}
{"id":"Mercury_SC_406725","answerKey":"A"}
{"id":"NYSEDREGENTS_2015_4_29","answerKey":"A"}
{"id":"Mercury_406136","answerKey":"A"}
{"id":"MSA_2012_5_23","answerKey":"C"}
{"id":"Mercury_405873","answerKey":"B"}
{"id":"Mercury_7043820","answerKey":"B"}
{"id":"MCAS_2005_5_34","answerKey":"B"}
{"id":"Mercury_7182245","answerKey":"D"}
{"id":"MSA_2012_8_30","answerKey":"A"}
{"id":"Mercury_7252753","answerKey":"A"}
{"id":"TAKS_2009_8_36","answerKey":"B"}
{"id":"Mercury_SC_415473","answerKey":"C"}
{"id":"Mercury_SC_413624","answerKey":"B"}
{"id":"Mercury_7016800","answerKey":"D"}
{"id":"Mercury_SC_407228","answerKey":"D"}
{"id":"Mercury_414504","answerKey":"B"}
{"id":"TIMSS_2011_4_pg27","answerKey":"A"}
{"id":"Mercury_SC_402029","answerKey":"D"}
{"id":"Mercury_7131845","answerKey":"C"}
{"id":"Mercury_SC_405533","answerKey":"B"}
{"id":"Mercury_7086748","answerKey":"A"}
{"id":"MDSA_2007_8_17","answerKey":"B"}
{"id":"Mercury_7210473","answerKey":"C"}
{"id":"Mercury_7214340","answerKey":"A"}
{"id":"MCAS_2005_9_17","answerKey":"C"}
{"id":"MEA_2016_8_12","answerKey":"B"}
{"id":"Mercury_SC_401278","answerKey":"B"}
{"id":"Mercury_SC_407689","answerKey":"D"}
{"id":"Mercury_7230405","answerKey":"B"}
{"id":"Mercury_SC_405640","answerKey":"A"}
{"id":"Mercury_7201775","answerKey":"D"}
{"id":"Mercury_7177398","answerKey":"D"}
{"id":"Mercury_7041423","answerKey":"A"}
{"id":"Mercury_7004743","answerKey":"D"}
{"id":"Mercury_7198468","answerKey":"A"}
{"id":"MEA_2014_5_11","answerKey":"C"}
{"id":"Mercury_410602","answerKey":"A"}
{"id":"Mercury_7108868","answerKey":"C"}
{"id":"Mercury_7033828","answerKey":"D"}
{"id":"TIMSS_2007_4_pg19","answerKey":"A"}
{"id":"Mercury_400828","answerKey":"D"}
{"id":"VASoL_2008_3_16","answerKey":"B"}
{"id":"LEAP__5_10315","answerKey":"C"}
{"id":"Mercury_SC_415471","answerKey":"A"}
{"id":"Mercury_7247065","answerKey":"C"}
{"id":"MDSA_2011_5_3","answerKey":"B"}
{"id":"MDSA_2009_5_39","answerKey":"C"}
{"id":"Mercury_187198","answerKey":"C"}
{"id":"MCAS_2000_4_36","answerKey":"A"}
{"id":"Mercury_184100","answerKey":"C"}
{"id":"Mercury_LBS10814","answerKey":"B"}
{"id":"Mercury_SC_408384","answerKey":"B"}
{"id":"Mercury_7043068","answerKey":"D"}
{"id":"Mercury_411071","answerKey":"B"}
{"id":"NYSEDREGENTS_2010_4_24","answerKey":"B"}
{"id":"Mercury_SC_409673","answerKey":"D"}
{"id":"Mercury_SC_400374","answerKey":"B"}
{"id":"CSZ_2009_8_CSZ20740","answerKey":"B"}
{"id":"Mercury_SC_406482","answerKey":"D"}
{"id":"OHAT_2007_8_24","answerKey":"A"}
{"id":"Mercury_188335","answerKey":"B"}
{"id":"Mercury_7128555","answerKey":"B"}
{"id":"Mercury_407517","answerKey":"C"}
{"id":"Mercury_405950","answerKey":"C"}
{"id":"MCAS_2004_9_5","answerKey":"C"}
{"id":"NCEOGA_2013_8_28","answerKey":"B"}
{"id":"Mercury_SC_406451","answerKey":"C"}
{"id":"Mercury_7109323","answerKey":"D"}
{"id":"Mercury_404132","answerKey":"B"}
{"id":"Mercury_7210210","answerKey":"C"}
{"id":"Mercury_SC_408042","answerKey":"B"}
{"id":"MCAS_2004_8_14","answerKey":"C"}
{"id":"TIMSS_2011_4_pg5","answerKey":"B"}
{"id":"Mercury_SC_406833","answerKey":"C"}
{"id":"Mercury_7029558","answerKey":"C"}
{"id":"Mercury_7138390","answerKey":"B"}
{"id":"MEAP_2005_5_12","answerKey":"A"}
{"id":"MCAS_2000_4_30","answerKey":"D"}
{"id":"MCAS_1998_4_12","answerKey":"C"}
{"id":"Mercury_175840","answerKey":"B"}
{"id":"Mercury_7099190","answerKey":"C"}
{"id":"Mercury_SC_401605","answerKey":"A"}
{"id":"TAKS_2009_5_36","answerKey":"A"}
{"id":"Mercury_7171570","answerKey":"D"}
{"id":"Mercury_SC_402057","answerKey":"C"}
{"id":"Mercury_SC_413628","answerKey":"C"}
{"id":"Mercury_LBS10131","answerKey":"C"}
{"id":"Mercury_7032428","answerKey":"B"}
{"id":"Mercury_7025008","answerKey":"A"}
{"id":"MEA_2011_8_19","answerKey":"D"}
{"id":"NYSEDREGENTS_2008_8_27","answerKey":"3"}
{"id":"VASoL_2007_5_22","answerKey":"B"}
{"id":"NCEOGA_2013_5_19","answerKey":"A"}
{"id":"Mercury_7037555","answerKey":"C"}
{"id":"Mercury_402132","answerKey":"B"}
{"id":"MCAS_2006_8_24","answerKey":"A"}
{"id":"Mercury_7128923","answerKey":"B"}
{"id":"Mercury_416379","answerKey":"A"}
{"id":"Mercury_7168053","answerKey":"C"}
{"id":"AKDE&ED_2008_8_1","answerKey":"A"}
{"id":"Mercury_SC_415476","answerKey":"B"}
{"id":"Mercury_7106960","answerKey":"A"}
{"id":"Mercury_7160563","answerKey":"C"}
{"id":"Mercury_7068583","answerKey":"B"}
{"id":"Mercury_404638","answerKey":"D"}
{"id":"Mercury_SC_407138","answerKey":"A"}
{"id":"MCAS_2000_4_10","answerKey":"D"}
{"id":"Mercury_177748","answerKey":"D"}
{"id":"MCAS_2004_9_21-v1","answerKey":"C"}
{"id":"MDSA_2007_5_16","answerKey":"A"}
{"id":"Mercury_401763","answerKey":"B"}
{"id":"Mercury_7268118","answerKey":"C"}
{"id":"Mercury_403232","answerKey":"D"}
{"id":"Mercury_415081","answerKey":"A"}
{"id":"Mercury_7206378","answerKey":"B"}
{"id":"CSZ30169","answerKey":"A"}
{"id":"Mercury_7013948","answerKey":"D"}
{"id":"Mercury_SC_402164","answerKey":"C"}
{"id":"Mercury_400880","answerKey":"C"}
{"id":"Mercury_7040793","answerKey":"B"}
{"id":"MDSA_2010_5_29","answerKey":"D"}
{"id":"LEAP__8_10365","answerKey":"A"}
{"id":"Mercury_SC_401295","answerKey":"B"}
{"id":"MCAS_2012_5_23625","answerKey":"C"}
{"id":"Mercury_7268048","answerKey":"C"}
{"id":"Mercury_SC_402629","answerKey":"A"}
{"id":"NCEOGA_2013_8_42","answerKey":"D"}
{"id":"Mercury_412463","answerKey":"D"}
{"id":"Mercury_409295","answerKey":"B"}
{"id":"Mercury_404609","answerKey":"D"}
{"id":"Mercury_7230090","answerKey":"C"}
{"id":"Mercury_7057488","answerKey":"D"}
{"id":"MDSA_2009_4_1","answerKey":"D"}
{"id":"Mercury_7150728","answerKey":"A"}
{"id":"Mercury_402207","answerKey":"B"}
{"id":"Mercury_411732","answerKey":"D"}
{"id":"Mercury_7270113","answerKey":"C"}
{"id":"AKDE&ED_2008_8_3","answerKey":"B"}
{"id":"MCAS_1999_8_1","answerKey":"A"}
{"id":"NYSEDREGENTS_2015_4_24","answerKey":"C"}
{"id":"Mercury_7122640","answerKey":"A"}
{"id":"Mercury_402547","answerKey":"C"}
{"id":"Mercury_7133945","answerKey":"B"}
{"id":"Mercury_7199028","answerKey":"B"}
{"id":"Mercury_7217298","answerKey":"A"}
{"id":"Mercury_7057680","answerKey":"C"}
{"id":"Mercury_SC_400404","answerKey":"D"}
{"id":"Mercury_SC_408030","answerKey":"A"}
{"id":"Mercury_415083","answerKey":"B"}
{"id":"Mercury_409114","answerKey":"B"}
{"id":"Mercury_SC_415006","answerKey":"A"}
{"id":"MSA_2012_5_15","answerKey":"C"}
{"id":"Mercury_SC_402612","answerKey":"D"}
{"id":"Mercury_SC_405937","answerKey":"B"}
{"id":"Mercury_SC_416459","answerKey":"C"}
{"id":"NAEP_2000_8_S21+4","answerKey":"C"}
{"id":"Mercury_7072380","answerKey":"B"}
{"id":"Mercury_SC_401373","answerKey":"A"}
{"id":"Mercury_SC_400579","answerKey":"C"}
{"id":"MCAS_2003_5_14","answerKey":"C"}
{"id":"MSA_2015_8_30","answerKey":"A"}
{"id":"Mercury_SC_415416","answerKey":"C"}
{"id":"NYSEDREGENTS_2012_8_42","answerKey":"2"}
{"id":"NCEOGA_2013_5_9","answerKey":"B"}
{"id":"MEAP_2005_8_45","answerKey":"D"}
{"id":"Mercury_SC_400594","answerKey":"D"}
{"id":"NCEOGA_2013_8_43","answerKey":"A"}
{"id":"MCAS_2006_8_13","answerKey":"C"}
{"id":"Mercury_7168823","answerKey":"D"}
{"id":"Mercury_7158935","answerKey":"C"}
{"id":"Mercury_7172708","answerKey":"B"}
{"id":"ACTAAP_2010_5_1","answerKey":"A"}
{"id":"Mercury_7093048","answerKey":"C"}
{"id":"Mercury_7081603","answerKey":"C"}
{"id":"Mercury_SC_LBS11003","answerKey":"D"}
{"id":"MCAS_2005_8_2","answerKey":"C"}
{"id":"ACTAAP_2010_7_14","answerKey":"C"}
{"id":"NYSEDREGENTS_2008_4_15","answerKey":"C"}
{"id":"Mercury_7107240","answerKey":"A"}
{"id":"Mercury_7218628","answerKey":"C"}
{"id":"MSA_2013_5_23","answerKey":"D"}
{"id":"Mercury_7081725","answerKey":"A"}
{"id":"Mercury_SC_413542","answerKey":"A"}
{"id":"Mercury_SC_407302","answerKey":"C"}
{"id":"Mercury_175053","answerKey":"D"}
{"id":"Mercury_7161315","answerKey":"B"}
{"id":"Mercury_189070","answerKey":"A"}
{"id":"Mercury_7189123","answerKey":"B"}
{"id":"Mercury_SC_402171","answerKey":"D"}
{"id":"Mercury_7217368","answerKey":"B"}
{"id":"Mercury_LBS10933","answerKey":"A"}
{"id":"Mercury_7223160","answerKey":"C"}
{"id":"Mercury_SC_401324","answerKey":"D"}
{"id":"LEAP_2001_8_10379","answerKey":"A"}
{"id":"VASoL_2009_5_30","answerKey":"D"}
{"id":"Mercury_416404","answerKey":"D"}
{"id":"Mercury_7103530","answerKey":"B"}
{"id":"Mercury_7030870","answerKey":"A"}
{"id":"LEAP__7_10348","answerKey":"C"}
{"id":"Mercury_SC_406835","answerKey":"B"}
{"id":"Mercury_178255","answerKey":"B"}
{"id":"MDSA_2012_8_16","answerKey":"B"}
{"id":"Mercury_409645","answerKey":"D"}
{"id":"TIMSS_2003_8_pg47","answerKey":"C"}
{"id":"NYSEDREGENTS_2010_8_16","answerKey":"2"}
{"id":"Mercury_7159810","answerKey":"B"}
{"id":"Mercury_7267523","answerKey":"B"}
{"id":"Mercury_SC_401006","answerKey":"A"}
{"id":"ACTAAP_2010_7_12","answerKey":"D"}
{"id":"MEAP_2005_8_13","answerKey":"D"}
{"id":"Mercury_7164623","answerKey":"C"}
{"id":"Mercury_417127","answerKey":"A"}
{"id":"Mercury_411224","answerKey":"B"}
{"id":"TIMSS_2011_8_pg15","answerKey":"D"}
{"id":"NYSEDREGENTS_2012_8_19","answerKey":"3"}
{"id":"Mercury_7222460","answerKey":"A"}
{"id":"Mercury_7007420","answerKey":"C"}
{"id":"Mercury_SC_405710","answerKey":"D"}
{"id":"Mercury_SC_401375","answerKey":"C"}
{"id":"VASoL_2010_3_22","answerKey":"A"}
{"id":"Mercury_SC_408358","answerKey":"A"}
{"id":"NYSEDREGENTS_2013_8_42","answerKey":"2"}
{"id":"Mercury_SC_400661","answerKey":"D"}
{"id":"Mercury_SC_415422","answerKey":"C"}
{"id":"Mercury_SC_400162","answerKey":"D"}
{"id":"Mercury_7212328","answerKey":"B"}
{"id":"NCEOGA_2013_8_26","answerKey":"D"}
{"id":"Mercury_SC_407696","answerKey":"B"}
{"id":"Mercury_SC_400052","answerKey":"B"}
{"id":"Mercury_7212870","answerKey":"D"}
{"id":"NYSEDREGENTS_2010_8_35","answerKey":"3"}
{"id":"MCAS_2010_8_12005","answerKey":"A"}
{"id":"Mercury_7218505","answerKey":"B"}
{"id":"Mercury_SC_400853","answerKey":"D"}
{"id":"Mercury_7210455","answerKey":"C"}
{"id":"Mercury_7174738","answerKey":"C"}
{"id":"MCAS_2001_5_2","answerKey":"D"}
{"id":"NYSEDREGENTS_2012_4_9","answerKey":"A"}
{"id":"Mercury_416593","answerKey":"B"}
{"id":"Mercury_7205870","answerKey":"B"}
{"id":"Mercury_SC_401798","answerKey":"C"}
{"id":"Mercury_7084228","answerKey":"D"}
{"id":"Mercury_417460","answerKey":"A"}
{"id":"Mercury_402539","answerKey":"B"}
{"id":"Mercury_406800","answerKey":"B"}
{"id":"Mercury_SC_408321","answerKey":"A"}
{"id":"Mercury_SC_406836","answerKey":"C"}
{"id":"Mercury_SC_410963","answerKey":"D"}
{"id":"Mercury_7132405","answerKey":"D"}
{"id":"Mercury_SC_408872","answerKey":"C"}
{"id":"VASoL_2008_3_25","answerKey":"B"}
{"id":"WASL_2005_8_12","answerKey":"A"}
{"id":"AKDE&ED_2012_8_20","answerKey":"B"}
{"id":"Mercury_7056823","answerKey":"B"}
{"id":"Mercury_7205800","answerKey":"B"}
{"id":"Mercury_SC_402282","answerKey":"A"}
{"id":"MCAS_1998_8_26","answerKey":"C"}
{"id":"Mercury_7230318","answerKey":"D"}
{"id":"Mercury_SC_416167","answerKey":"C"}
{"id":"Mercury_7027720","answerKey":"B"}
{"id":"LEAP__5_10312","answerKey":"B"}
{"id":"Mercury_405161","answerKey":"D"}
{"id":"Mercury_SC_409245","answerKey":"D"}
{"id":"ACTAAP_2011_5_8","answerKey":"C"}
{"id":"Mercury_7223370","answerKey":"D"}
{"id":"Mercury_SC_400697","answerKey":"B"}
{"id":"Mercury_SC_401262","answerKey":"D"}
{"id":"Mercury_7136063","answerKey":"B"}
{"id":"Mercury_405876","answerKey":"D"}
{"id":"Mercury_7057890","answerKey":"A"}
{"id":"LEAP_2002_4_10247","answerKey":"B"}
{"id":"Mercury_SC_405481","answerKey":"C"}
{"id":"Mercury_SC_400401","answerKey":"B"}
{"id":"Mercury_7064260","answerKey":"B"}
{"id":"Mercury_7015995","answerKey":"B"}
{"id":"Mercury_400887","answerKey":"B"}
{"id":"Mercury_7247678","answerKey":"A"}
{"id":"MDSA_2007_8_24","answerKey":"C"}
{"id":"AKDE&ED_2008_8_48","answerKey":"A"}
{"id":"Mercury_401014","answerKey":"C"}
{"id":"Mercury_7106698","answerKey":"A"}
{"id":"Mercury_7143308","answerKey":"B"}
{"id":"MCAS_2005_9_21","answerKey":"B"}
{"id":"Mercury_400443","answerKey":"B"}
{"id":"Mercury_7283430","answerKey":"B"}
{"id":"Mercury_7159250","answerKey":"C"}
{"id":"Mercury_401912","answerKey":"A"}
{"id":"Mercury_7219328","answerKey":"B"}
{"id":"Mercury_7214498","answerKey":"C"}
{"id":"TAKS_2009_5_14","answerKey":"A"}
{"id":"NYSEDREGENTS_2013_4_17","answerKey":"D"}
{"id":"Mercury_403907","answerKey":"C"}
{"id":"Mercury_7081480","answerKey":"A"}
{"id":"Mercury_416505","answerKey":"B"}
{"id":"Mercury_7041668","answerKey":"D"}
{"id":"Mercury_SC_401309","answerKey":"C"}
{"id":"NYSEDREGENTS_2010_4_1","answerKey":"B"}
{"id":"ACTAAP_2007_7_36","answerKey":"C"}
{"id":"VASoL_2009_3_12","answerKey":"D"}
{"id":"Mercury_7085295","answerKey":"C"}
{"id":"Mercury_7201968","answerKey":"A"}
{"id":"Mercury_7214008","answerKey":"C"}
{"id":"Mercury_176855","answerKey":"B"}
{"id":"Mercury_SC_401678","answerKey":"B"}
{"id":"Mercury_417143","answerKey":"D"}
{"id":"NYSEDREGENTS_2013_4_21","answerKey":"D"}
{"id":"Mercury_7032620","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_8_9","answerKey":"4"}
{"id":"TAKS_2009_8_27","answerKey":"C"}
{"id":"NCEOGA_2013_8_57","answerKey":"C"}
{"id":"Mercury_SC_413143","answerKey":"C"}
{"id":"Mercury_401195","answerKey":"D"}
{"id":"CSZ10358","answerKey":"A"}
{"id":"MCAS_1999_4_26","answerKey":"C"}
{"id":"AKDE&ED_2008_8_36","answerKey":"A"}
{"id":"Mercury_7017938","answerKey":"B"}
{"id":"MDSA_2013_8_32","answerKey":"D"}
{"id":"Mercury_7038028","answerKey":"C"}
{"id":"Mercury_7057103","answerKey":"C"}
{"id":"NYSEDREGENTS_2008_4_26","answerKey":"C"}
{"id":"Mercury_417117","answerKey":"C"}
{"id":"MCAS_2016_8_15","answerKey":"C"}
{"id":"Mercury_400780","answerKey":"C"}
{"id":"NYSEDREGENTS_2008_8_32","answerKey":"1"}
{"id":"Mercury_SC_416104","answerKey":"C"}
{"id":"Mercury_416646","answerKey":"B"}
{"id":"Mercury_SC_405296","answerKey":"C"}
{"id":"MCAS_2006_8_31","answerKey":"D"}
{"id":"MCAS_2015_5_14","answerKey":"A"}
{"id":"Mercury_417465","answerKey":"C"}
{"id":"MCAS_1998_4_19","answerKey":"D"}
{"id":"Mercury_7214778","answerKey":"D"}
{"id":"Mercury_7123393","answerKey":"D"}
{"id":"Mercury_7207550","answerKey":"D"}
{"id":"Mercury_SC_405827","answerKey":"B"}
{"id":"NYSEDREGENTS_2015_4_11","answerKey":"D"}
{"id":"Mercury_404097","answerKey":"C"}
{"id":"AIMS_2009_4_4","answerKey":"C"}
{"id":"NCEOGA_2013_8_18","answerKey":"A"}
{"id":"Mercury_400884","answerKey":"B"}
{"id":"Mercury_7219678","answerKey":"A"}
{"id":"ACTAAP_2010_5_7","answerKey":"B"}
{"id":"ACTAAP_2012_7_9","answerKey":"C"}
{"id":"MCAS_2005_8_6","answerKey":"A"}
{"id":"Mercury_SC_401162","answerKey":"C"}
{"id":"Mercury_SC_407710","answerKey":"D"}
{"id":"VASoL_2009_3_23","answerKey":"D"}
{"id":"Mercury_SC_402276","answerKey":"D"}
{"id":"Mercury_400744","answerKey":"A"}
{"id":"Mercury_SC_LBS10902","answerKey":"B"}
{"id":"Mercury_7133245","answerKey":"B"}
{"id":"Mercury_7131530","answerKey":"B"}
{"id":"Mercury_7041143","answerKey":"C"}
{"id":"MCAS_2010_5_11984","answerKey":"A"}
{"id":"Mercury_7159285","answerKey":"C"}
{"id":"AIMS_2008_8_13","answerKey":"D"}
{"id":"MDSA_2013_8_20","answerKey":"C"}
{"id":"Mercury_7114100","answerKey":"A"}
{"id":"Mercury_7213343","answerKey":"A"}
{"id":"Mercury_SC_LBS10597","answerKey":"A"}
{"id":"Mercury_7126263","answerKey":"B"}
{"id":"Mercury_7133613","answerKey":"A"}
{"id":"Mercury_7234605","answerKey":"D"}
{"id":"Mercury_SC_400839","answerKey":"B"}
{"id":"Mercury_SC_402984","answerKey":"B"}
{"id":"NYSEDREGENTS_2012_4_29","answerKey":"C"}
{"id":"VASoL_2009_3_22","answerKey":"D"}
{"id":"Mercury_409349","answerKey":"A"}
{"id":"Mercury_SC_407417","answerKey":"D"}
{"id":"VASoL_2007_5_21","answerKey":"C"}
{"id":"MCAS_2012_8_23651","answerKey":"A"}
{"id":"MCAS_2000_4_26","answerKey":"D"}
{"id":"Mercury_SC_410971","answerKey":"A"}
{"id":"Mercury_404841","answerKey":"D"}
{"id":"Mercury_416651","answerKey":"A"}
{"id":"Mercury_416576","answerKey":"B"}
{"id":"MCAS_1998_8_24","answerKey":"D"}
{"id":"Mercury_SC_408367","answerKey":"B"}
{"id":"Mercury_405804","answerKey":"D"}
{"id":"Mercury_7216318","answerKey":"A"}
{"id":"Mercury_401312","answerKey":"D"}
{"id":"MDSA_2013_8_23","answerKey":"D"}
{"id":"Mercury_SC_405880","answerKey":"A"}
{"id":"ACTAAP_2009_5_12","answerKey":"B"}
{"id":"CSZ20754","answerKey":"B"}
{"id":"Mercury_184363","answerKey":"D"}
{"id":"Mercury_7188195","answerKey":"C"}
{"id":"Mercury_7221043","answerKey":"D"}
{"id":"Mercury_7107328","answerKey":"B"}
{"id":"Mercury_415084","answerKey":"C"}
{"id":"Mercury_415082","answerKey":"A"}
{"id":"Mercury_SC_416169","answerKey":"A"}
{"id":"MEA_2011_8_13","answerKey":"A"}
{"id":"TIMSS_2003_4_pg82","answerKey":"A"}
{"id":"CSZ30338","answerKey":"C"}
{"id":"TIMSS_2003_8_pg85","answerKey":"B"}
{"id":"Mercury_7221988","answerKey":"C"}
{"id":"NCEOGA_2013_5_11","answerKey":"D"}
{"id":"MCAS_2013_8_29416","answerKey":"B"}
{"id":"Mercury_SC_401142","answerKey":"B"}
{"id":"Mercury_7206395","answerKey":"C"}
{"id":"Mercury_179025","answerKey":"B"}
{"id":"Mercury_7130620","answerKey":"B"}
{"id":"Mercury_177870","answerKey":"C"}
{"id":"Mercury_7282083","answerKey":"B"}
{"id":"Mercury_SC_400233","answerKey":"C"}
{"id":"Mercury_7082443","answerKey":"B"}
{"id":"NCEOGA_2013_8_15","answerKey":"B"}
{"id":"Mercury_7210140","answerKey":"A"}
{"id":"Mercury_7106593","answerKey":"D"}
{"id":"Mercury_416536","answerKey":"B"}
{"id":"Mercury_410026","answerKey":"D"}
{"id":"ACTAAP_2011_5_1","answerKey":"B"}
{"id":"Mercury_417138","answerKey":"B"}
{"id":"Mercury_7138915","answerKey":"D"}
{"id":"NYSEDREGENTS_2008_4_11","answerKey":"C"}
{"id":"Mercury_404435","answerKey":"D"}
{"id":"MDSA_2009_5_25","answerKey":"B"}
{"id":"OHAT_2007_8_12","answerKey":"C"}
{"id":"Mercury_LBS10302","answerKey":"C"}
{"id":"Mercury_7027248","answerKey":"B"}
{"id":"Mercury_SC_401360","answerKey":"A"}
{"id":"ACTAAP_2013_5_17","answerKey":"A"}
{"id":"Mercury_407125","answerKey":"C"}
{"id":"Mercury_404820","answerKey":"D"}
{"id":"Mercury_SC_416168","answerKey":"B"}
{"id":"TIMSS_1995_8_K18","answerKey":"A"}
{"id":"Mercury_SC_405130","answerKey":"A"}
{"id":"Mercury_SC_408631","answerKey":"C"}
{"id":"Mercury_SC_408763","answerKey":"C"}
{"id":"MCAS_8_2015_18","answerKey":"A"}
{"id":"Mercury_411729","answerKey":"B"}
{"id":"MDSA_2012_8_6","answerKey":"C"}
{"id":"MCAS_1999_8_5","answerKey":"B"}
{"id":"WASL_2004_8_17","answerKey":"D"}
{"id":"Mercury_414365","answerKey":"C"}
{"id":"Mercury_SC_415406","answerKey":"B"}
{"id":"MCAS_2000_8_29","answerKey":"B"}
{"id":"Mercury_416230","answerKey":"C"}
{"id":"Mercury_7001295","answerKey":"C"}
{"id":"MSA_2012_5_2","answerKey":"A"}
{"id":"MCAS_2005_8_7","answerKey":"C"}
{"id":"Mercury_7206553","answerKey":"D"}
{"id":"VASoL_2010_3_39","answerKey":"A"}
{"id":"Mercury_416380","answerKey":"A"}
{"id":"OHAT_2008_5_34","answerKey":"C"}
{"id":"Mercury_7268328","answerKey":"C"}
{"id":"NYSEDREGENTS_2008_8_36","answerKey":"3"}
{"id":"Mercury_SC_414156","answerKey":"C"}
{"id":"Mercury_7094133","answerKey":"D"}
{"id":"MEA_2013_5_15","answerKey":"C"}
{"id":"OHAT_2010_8_35","answerKey":"A"}
{"id":"Mercury_SC_416174","answerKey":"C"}
{"id":"TIMSS_1995_8_J6","answerKey":"B"}
{"id":"Mercury_SC_401587","answerKey":"A"}
{"id":"MDSA_2011_5_23","answerKey":"D"}
{"id":"AIMS_2008_8_11","answerKey":"C"}
{"id":"Mercury_7159215","answerKey":"D"}
{"id":"MCAS_2006_9_30","answerKey":"B"}
{"id":"MCAS_1999_4_27","answerKey":"A"}
{"id":"Mercury_7016538","answerKey":"C"}
{"id":"Mercury_SC_409266","answerKey":"A"}
{"id":"OHAT_2007_5_15","answerKey":"B"}
{"id":"Mercury_7230073","answerKey":"B"}
{"id":"Mercury_7245840","answerKey":"B"}
{"id":"Mercury_SC_401788","answerKey":"B"}
{"id":"ACTAAP_2014_7_5","answerKey":"D"}
{"id":"MCAS_2004_5_11","answerKey":"D"}
{"id":"NCEOGA_2013_8_7","answerKey":"A"}
{"id":"LEAP__7_10339","answerKey":"A"}
{"id":"Mercury_7018270","answerKey":"C"}
{"id":"Mercury_7034808","answerKey":"A"}
{"id":"Mercury_7216300","answerKey":"C"}
{"id":"Mercury_SC_400985","answerKey":"C"}
{"id":"Mercury_7188528","answerKey":"A"}
{"id":"TIMSS_1995_8_R2","answerKey":"B"}
{"id":"Mercury_SC_400032","answerKey":"C"}
{"id":"Mercury_7252245","answerKey":"D"}
{"id":"MCAS_2002_8_17","answerKey":"D"}
{"id":"MDSA_2007_8_30","answerKey":"C"}
{"id":"NCEOGA_2013_5_35","answerKey":"D"}
{"id":"Mercury_7082758","answerKey":"B"}
{"id":"Mercury_7094308","answerKey":"C"}
{"id":"Mercury_7136028","answerKey":"A"}
{"id":"Mercury_7159075","answerKey":"B"}
{"id":"MCAS_2015_5_19","answerKey":"B"}
{"id":"MSA_2012_5_12","answerKey":"A"}
{"id":"MCAS_2014_5_13","answerKey":"D"}
{"id":"Mercury_SC_400392","answerKey":"C"}
{"id":"Mercury_7159320","answerKey":"A"}
{"id":"Mercury_7218365","answerKey":"B"}
{"id":"MCAS_2004_9_10-v1","answerKey":"C"}
{"id":"AIMS_2009_4_12","answerKey":"D"}
{"id":"Mercury_SC_414274","answerKey":"B"}
{"id":"MCAS_2005_9_6","answerKey":"C"}
{"id":"MCAS_1998_4_23","answerKey":"A"}
{"id":"Mercury_7075023","answerKey":"D"}
{"id":"Mercury_SC_400182","answerKey":"B"}
{"id":"Mercury_SC_400133","answerKey":"B"}
{"id":"MSA_2013_5_11","answerKey":"D"}
{"id":"Mercury_SC_408706","answerKey":"B"}
{"id":"Mercury_7213325","answerKey":"D"}
{"id":"Mercury_SC_LBS10932","answerKey":"B"}
{"id":"Mercury_192220","answerKey":"D"}
{"id":"Mercury_SC_407247","answerKey":"C"}
{"id":"Mercury_7024798","answerKey":"D"}
{"id":"Mercury_7180810","answerKey":"C"}
{"id":"Mercury_412780","answerKey":"B"}
{"id":"LEAP_2011_8_10434","answerKey":"B"}
{"id":"Mercury_7200340","answerKey":"D"}
{"id":"Mercury_7056525","answerKey":"C"}
{"id":"Mercury_7085278","answerKey":"D"}
{"id":"AKDE&ED_2008_4_35","answerKey":"D"}
{"id":"MCAS_1999_8_16","answerKey":"D"}
{"id":"Mercury_SC_400063","answerKey":"D"}
{"id":"Mercury_SC_401666","answerKey":"D"}
{"id":"TIMSS_2011_8_pg31","answerKey":"C"}
{"id":"Mercury_412673","answerKey":"B"}
{"id":"Mercury_7130655","answerKey":"D"}
{"id":"MCAS_2004_5_7","answerKey":"B"}
{"id":"Mercury_7187373","answerKey":"D"}
{"id":"Mercury_SC_401361","answerKey":"C"}
{"id":"MCAS_2006_8_12","answerKey":"D"}
{"id":"Mercury_7233765","answerKey":"B"}
{"id":"Mercury_SC_407613","answerKey":"C"}
{"id":"MCAS_2005_5_24","answerKey":"A"}
{"id":"Mercury_405778","answerKey":"C"}
{"id":"Mercury_7263060","answerKey":"D"}
{"id":"Mercury_SC_401668","answerKey":"D"}
{"id":"Mercury_7230388","answerKey":"D"}
{"id":"Mercury_7041650","answerKey":"D"}
{"id":"Mercury_SC_409009","answerKey":"A"}
{"id":"Mercury_7223143","answerKey":"D"}
{"id":"ACTAAP_2007_7_3","answerKey":"D"}
{"id":"Mercury_7215670","answerKey":"C"}
{"id":"MEA_2010_8_15","answerKey":"A"}
{"id":"Mercury_7270515","answerKey":"B"}
{"id":"Mercury_7006160","answerKey":"C"}
{"id":"Mercury_SC_410630","answerKey":"C"}
{"id":"Mercury_7082320","answerKey":"A"}
{"id":"MEA_2013_8_1","answerKey":"A"}
{"id":"Mercury_7033845","answerKey":"B"}
{"id":"Mercury_7221620","answerKey":"A"}
{"id":"LEAP__7_10352","answerKey":"D"}
{"id":"Mercury_412605","answerKey":"C"}
{"id":"Mercury_416638","answerKey":"A"}
{"id":"MCAS_2011_8_17694","answerKey":"A"}
{"id":"Mercury_SC_400012","answerKey":"D"}
{"id":"Mercury_SC_413458","answerKey":"B"}
{"id":"Mercury_7139545","answerKey":"C"}
{"id":"NYSEDREGENTS_2015_4_5","answerKey":"B"}
{"id":"TIMSS_2003_8_pg16","answerKey":"C"}
{"id":"Mercury_SC_415073","answerKey":"A"}
{"id":"Mercury_7012880","answerKey":"A"}
{"id":"Mercury_191625","answerKey":"D"}
{"id":"Mercury_SC_402985","answerKey":"B"}
{"id":"Mercury_7005425","answerKey":"A"}
{"id":"MDSA_2013_8_40","answerKey":"C"}
{"id":"Mercury_401684","answerKey":"B"}
{"id":"NCEOGA_2013_5_17","answerKey":"B"}
{"id":"Mercury_7116183","answerKey":"B"}
{"id":"Mercury_7106628","answerKey":"D"}
{"id":"Mercury_7203473","answerKey":"D"}
{"id":"Mercury_SC_416108","answerKey":"C"}
{"id":"LEAP_2007_8_10418","answerKey":"C"}
{"id":"Mercury_7111178","answerKey":"C"}
{"id":"Mercury_7203560","answerKey":"A"}
{"id":"ACTAAP_2013_7_2","answerKey":"A"}
{"id":"MCAS_2012_8_23640","answerKey":"D"}
{"id":"Mercury_404272","answerKey":"B"}
{"id":"MCAS_2009_8_17","answerKey":"B"}
{"id":"AIMS_2008_4_5","answerKey":"C"}
{"id":"Mercury_7236513","answerKey":"D"}
{"id":"Mercury_SC_LBS10027","answerKey":"B"}
{"id":"Mercury_189053","answerKey":"D"}
{"id":"Mercury_SC_414271","answerKey":"C"}
{"id":"Mercury_408922","answerKey":"C"}
{"id":"Mercury_7264093","answerKey":"C"}
{"id":"Mercury_SC_LBS11009","answerKey":"C"}
{"id":"Mercury_7191433","answerKey":"C"}
{"id":"MEAP_2005_5_14","answerKey":"D"}
{"id":"Mercury_416683","answerKey":"C"}
{"id":"Mercury_7040775","answerKey":"B"}
{"id":"Mercury_7222600","answerKey":"C"}
{"id":"MCAS_2001_5_3","answerKey":"D"}
{"id":"MCAS_2004_8_7","answerKey":"D"}
{"id":"Mercury_415268","answerKey":"D"}
{"id":"Mercury_7017710","answerKey":"C"}
{"id":"Mercury_7210123","answerKey":"A"}
{"id":"MCAS_2009_5_6519","answerKey":"C"}
{"id":"Mercury_401502","answerKey":"D"}
{"id":"Mercury_7109498","answerKey":"B"}
{"id":"VASoL_2008_5_10","answerKey":"A"}
{"id":"MCAS_2006_9_4","answerKey":"A"}
{"id":"Mercury_402341","answerKey":"D"}
{"id":"MCAS_2006_9_34","answerKey":"D"}
{"id":"Mercury_7267715","answerKey":"C"}
{"id":"Mercury_SC_413089","answerKey":"D"}
{"id":"Mercury_SC_401656","answerKey":"A"}
{"id":"Mercury_407019","answerKey":"D"}
{"id":"Mercury_417128","answerKey":"B"}
{"id":"Mercury_7081305","answerKey":"D"}
{"id":"NYSEDREGENTS_2015_8_3","answerKey":"2"}
{"id":"MEA_2016_8_15","answerKey":"A"}
{"id":"ACTAAP_2015_7_9","answerKey":"D"}
{"id":"Mercury_7216423","answerKey":"B"}
{"id":"Mercury_416633","answerKey":"A"}
{"id":"Mercury_7038518","answerKey":"D"}
{"id":"Mercury_7085225","answerKey":"B"}
{"id":"LEAP__4_10225","answerKey":"B"}
{"id":"Mercury_SC_401661","answerKey":"D"}
{"id":"TIMSS_1995_8_Q15","answerKey":"A"}
{"id":"MCAS_1999_4_23","answerKey":"A"}
{"id":"TIMSS_1995_8_J7","answerKey":"D"}
{"id":"Mercury_SC_LBS10018","answerKey":"A"}
{"id":"Mercury_SC_406855","answerKey":"D"}
{"id":"Mercury_SC_415457","answerKey":"D"}
{"id":"NYSEDREGENTS_2015_4_25","answerKey":"C"}
{"id":"Mercury_7058135","answerKey":"B"}
{"id":"MDSA_2008_4_19","answerKey":"B"}
{"id":"AKDE&ED_2008_8_45","answerKey":"D"}
{"id":"Mercury_7131758","answerKey":"A"}
{"id":"NYSEDREGENTS_2013_8_10","answerKey":"2"}
{"id":"Mercury_SC_401783","answerKey":"D"}
{"id":"Mercury_7190120","answerKey":"B"}
{"id":"Mercury_409317","answerKey":"B"}
{"id":"Mercury_7268240","answerKey":"D"}
{"id":"Mercury_7228358","answerKey":"D"}
{"id":"MCAS_2004_5_33","answerKey":"C"}
{"id":"Mercury_7008855","answerKey":"B"}
{"id":"Mercury_7057085","answerKey":"B"}
{"id":"Mercury_7171728","answerKey":"B"}
{"id":"NAEP_2005_4_S14+3","answerKey":"C"}
{"id":"Mercury_7024395","answerKey":"A"}
{"id":"NYSEDREGENTS_2012_8_28","answerKey":"3"}
{"id":"Mercury_7090790","answerKey":"A"}
{"id":"TIMSS_2003_8_pg87","answerKey":"B"}
{"id":"Mercury_SC_407382","answerKey":"C"}
{"id":"MDSA_2010_4_20","answerKey":"B"}
{"id":"Mercury_SC_405019","answerKey":"D"}
{"id":"Mercury_7123078","answerKey":"D"}
{"id":"Mercury_400084","answerKey":"D"}
{"id":"Mercury_7139650","answerKey":"C"}
{"id":"Mercury_417150","answerKey":"B"}
{"id":"Mercury_SC_402256","answerKey":"C"}
{"id":"TIMSS_2007_8_pg53","answerKey":"A"}
{"id":"MCAS_2006_9_17-v1","answerKey":"D"}
{"id":"Mercury_401728","answerKey":"D"}
{"id":"Mercury_7192798","answerKey":"C"}
{"id":"Mercury_7221078","answerKey":"C"}
{"id":"Mercury_7004953","answerKey":"B"}
{"id":"TIMSS_2003_8_pg94","answerKey":"D"}
{"id":"Mercury_7095060","answerKey":"A"}
{"id":"Mercury_7123358","answerKey":"A"}
{"id":"Mercury_7069020","answerKey":"A"}
{"id":"TIMSS_2003_8_pg117","answerKey":"B"}
{"id":"VASoL_2008_3_32","answerKey":"B"}
{"id":"Mercury_SC_400142","answerKey":"A"}
{"id":"Mercury_7163818","answerKey":"D"}
{"id":"Mercury_402502","answerKey":"D"}
{"id":"Mercury_7130778","answerKey":"C"}
{"id":"MEA_2010_8_18","answerKey":"C"}
{"id":"Mercury_7211033","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_8_17","answerKey":"1"}
{"id":"NAEP_2005_8_S11+1","answerKey":"C"}
{"id":"Mercury_412774","answerKey":"C"}
{"id":"MEA_2013_5_12","answerKey":"C"}
{"id":"Mercury_7098473","answerKey":"B"}
{"id":"Mercury_417593","answerKey":"A"}
{"id":"Mercury_7081743","answerKey":"B"}
{"id":"Mercury_7018410","answerKey":"C"}
{"id":"Mercury_402563","answerKey":"B"}
{"id":"Mercury_416407","answerKey":"C"}
{"id":"Mercury_SC_400400","answerKey":"A"}
{"id":"MCAS_2000_8_22","answerKey":"D"}
{"id":"MCAS_8_2014_8","answerKey":"A"}
{"id":"Mercury_7206430","answerKey":"A"}
{"id":"Mercury_7185343","answerKey":"D"}
{"id":"OHAT_2010_8_8","answerKey":"B"}
{"id":"Mercury_405462","answerKey":"B"}
{"id":"Mercury_SC_LBS10337","answerKey":"A"}
{"id":"Mercury_7142520","answerKey":"C"}
{"id":"Mercury_SC_405501","answerKey":"D"}
{"id":"Mercury_7009555","answerKey":"C"}
{"id":"Mercury_409085","answerKey":"A"}
{"id":"NYSEDREGENTS_2012_4_2","answerKey":"A"}
{"id":"Mercury_407539","answerKey":"D"}
{"id":"ACTAAP_2013_7_16","answerKey":"D"}
{"id":"AKDE&ED_2008_8_34","answerKey":"D"}
{"id":"MCAS_2004_8_3","answerKey":"A"}
{"id":"Mercury_415272","answerKey":"B"}
{"id":"Mercury_405387","answerKey":"B"}
{"id":"Mercury_7116323","answerKey":"D"}
{"id":"Mercury_7213430","answerKey":"B"}
{"id":"Mercury_7234360","answerKey":"A"}
{"id":"Mercury_405685","answerKey":"D"}
{"id":"Mercury_7236740","answerKey":"B"}
{"id":"Mercury_7116235","answerKey":"A"}
{"id":"Mercury_SC_405357","answerKey":"C"}
{"id":"Mercury_7042945","answerKey":"B"}
{"id":"Mercury_7106750","answerKey":"D"}
{"id":"MDSA_2009_4_34","answerKey":"B"}
{"id":"Mercury_7016310","answerKey":"C"}
{"id":"VASoL_2007_3_1","answerKey":"A"}
{"id":"Mercury_7030468","answerKey":"A"}
{"id":"Mercury_SC_402616","answerKey":"D"}
{"id":"Mercury_405464","answerKey":"A"}
{"id":"Mercury_7205608","answerKey":"B"}
{"id":"Mercury_7015208","answerKey":"B"}
{"id":"Mercury_SC_409666","answerKey":"D"}
{"id":"Mercury_7230353","answerKey":"B"}
{"id":"Mercury_7150343","answerKey":"B"}
{"id":"Mercury_7026723","answerKey":"D"}
{"id":"Mercury_7024273","answerKey":"C"}
{"id":"AKDE&ED_2008_8_40","answerKey":"C"}
{"id":"Mercury_183033","answerKey":"B"}
{"id":"Mercury_402364","answerKey":"D"}
{"id":"Mercury_7263183","answerKey":"B"}
{"id":"Mercury_7222530","answerKey":"B"}
{"id":"OHAT_2009_8_36","answerKey":"C"}
{"id":"Mercury_7141750","answerKey":"B"}
{"id":"TIMSS_2011_4_pg45","answerKey":"A"}
{"id":"MCAS_2014_5_5","answerKey":"A"}
{"id":"Mercury_SC_409241","answerKey":"C"}
{"id":"Mercury_SC_401147","answerKey":"C"}
{"id":"Mercury_SC_LBS10273","answerKey":"D"}
{"id":"Mercury_401523","answerKey":"A"}
{"id":"Mercury_401865","answerKey":"D"}
{"id":"MCAS_2013_8_29435","answerKey":"B"}
{"id":"Mercury_SC_406720","answerKey":"C"}
{"id":"NYSEDREGENTS_2013_8_34","answerKey":"1"}
{"id":"Mercury_7038833","answerKey":"C"}
{"id":"Mercury_175560","answerKey":"C"}
{"id":"Mercury_7005005","answerKey":"C"}
{"id":"Mercury_183890","answerKey":"D"}
{"id":"Mercury_7270358","answerKey":"D"}
{"id":"MCAS_2013_5_29411","answerKey":"C"}
{"id":"ACTAAP_2007_7_31","answerKey":"C"}
{"id":"Mercury_7082023","answerKey":"B"}
{"id":"MCAS_2003_8_21","answerKey":"B"}
{"id":"NYSEDREGENTS_2015_8_9","answerKey":"2"}
{"id":"Mercury_7064750","answerKey":"D"}
{"id":"TIMSS_2007_8_pg113","answerKey":"C"}
{"id":"Mercury_7173583","answerKey":"C"}
{"id":"Mercury_403930","answerKey":"C"}
{"id":"Mercury_417118","answerKey":"C"}
{"id":"Mercury_7143010","answerKey":"B"}
{"id":"Mercury_SC_401801","answerKey":"A"}
{"id":"Mercury_410334","answerKey":"B"}
{"id":"NAEP_2000_4_S12+3","answerKey":"B"}
{"id":"Mercury_7218015","answerKey":"D"}
{"id":"Mercury_7109603","answerKey":"A"}
{"id":"NYSEDREGENTS_2008_8_42","answerKey":"3"}
{"id":"NAEP_2000_8_S11+11","answerKey":"B"}
{"id":"Mercury_7271670","answerKey":"D"}
{"id":"ACTAAP_2009_5_8","answerKey":"B"}
{"id":"NYSEDREGENTS_2012_4_1","answerKey":"B"}
{"id":"Mercury_SC_409030","answerKey":"D"}
{"id":"MEA_2013_8_8","answerKey":"D"}
{"id":"Mercury_7140333","answerKey":"D"}
{"id":"Mercury_SC_LBS10664","answerKey":"C"}
{"id":"Mercury_7171430","answerKey":"B"}
{"id":"Mercury_SC_407572","answerKey":"D"}
{"id":"VASoL_2009_3_2","answerKey":"D"}
{"id":"Mercury_SC_407383","answerKey":"D"}
{"id":"Mercury_7218400","answerKey":"B"}
{"id":"Mercury_184818","answerKey":"D"}
{"id":"Mercury_SC_405931","answerKey":"D"}
{"id":"Mercury_SC_416177","answerKey":"C"}
{"id":"Mercury_SC_406625","answerKey":"B"}
{"id":"MCAS_2014_8_16","answerKey":"D"}
{"id":"Mercury_7138460","answerKey":"C"}
{"id":"Mercury_7129640","answerKey":"B"}
{"id":"Mercury_7024290","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_4_28","answerKey":"C"}
{"id":"Mercury_SC_414339","answerKey":"C"}
{"id":"LEAP_2000_8_2","answerKey":"A"}
{"id":"Mercury_7172270","answerKey":"A"}
{"id":"Mercury_184205","answerKey":"C"}
{"id":"Mercury_SC_400683","answerKey":"A"}
{"id":"Mercury_7182210","answerKey":"B"}
{"id":"Mercury_7238945","answerKey":"C"}
{"id":"Mercury_SC_408748","answerKey":"D"}
{"id":"MEA_2016_5_4","answerKey":"D"}
{"id":"Mercury_7271513","answerKey":"D"}
{"id":"Mercury_7189000","answerKey":"A"}
{"id":"Mercury_SC_401585","answerKey":"A"}
{"id":"Mercury_188528","answerKey":"D"}
{"id":"Mercury_SC_415719","answerKey":"A"}
{"id":"Mercury_SC_407072","answerKey":"A"}
{"id":"Mercury_7091823","answerKey":"A"}
{"id":"Mercury_7040985","answerKey":"B"}
{"id":"Mercury_SC_409383","answerKey":"D"}
{"id":"Mercury_SC_407080","answerKey":"D"}
{"id":"MCAS_2000_4_34","answerKey":"A"}
{"id":"Mercury_7032498","answerKey":"D"}
{"id":"TAKS_2009_5_30","answerKey":"A"}
{"id":"Mercury_SC_415761","answerKey":"C"}
{"id":"ACTAAP_2008_5_10","answerKey":"D"}
{"id":"Mercury_416671","answerKey":"D"}
{"id":"Mercury_400803","answerKey":"D"}
{"id":"Mercury_7005880","answerKey":"C"}
{"id":"Mercury_7210508","answerKey":"C"}
{"id":"NYSEDREGENTS_2013_4_1","answerKey":"C"}
{"id":"NYSEDREGENTS_2008_4_12","answerKey":"C"}
{"id":"Mercury_400091","answerKey":"B"}
{"id":"Mercury_SC_402257","answerKey":"D"}
{"id":"Mercury_7227815","answerKey":"D"}
{"id":"ACTAAP_2010_7_3","answerKey":"A"}
{"id":"Mercury_SC_410905","answerKey":"D"}
{"id":"OHAT_2010_5_18","answerKey":"D"}
{"id":"NAEP_2000_8_S11+10","answerKey":"A"}
{"id":"MCAS_2003_8_29","answerKey":"B"}
{"id":"Mercury_401433","answerKey":"D"}
{"id":"TIMSS_1995_8_N4","answerKey":"B"}
{"id":"Mercury_SC_405885","answerKey":"C"}
{"id":"Mercury_7263638","answerKey":"B"}
{"id":"Mercury_401428","answerKey":"B"}
{"id":"Mercury_SC_402121","answerKey":"B"}
{"id":"NYSEDREGENTS_2015_4_7","answerKey":"A"}
{"id":"MCAS_2012_5_23614","answerKey":"C"}
{"id":"Mercury_407262","answerKey":"A"}
{"id":"MCAS_2014_8_6","answerKey":"B"}
{"id":"Mercury_7032515","answerKey":"D"}
{"id":"Mercury_7270165","answerKey":"C"}
{"id":"Mercury_7017045","answerKey":"B"}
{"id":"Mercury_SC_400386","answerKey":"A"}
{"id":"Mercury_400750","answerKey":"D"}
{"id":"MCAS_2006_9_28-v1","answerKey":"C"}
{"id":"Mercury_416376","answerKey":"D"}
{"id":"Mercury_7086520","answerKey":"A"}
{"id":"Mercury_7014333","answerKey":"A"}
{"id":"Mercury_SC_406623","answerKey":"A"}
{"id":"Mercury_7042648","answerKey":"A"}
{"id":"MCAS_2004_8_23","answerKey":"B"}
{"id":"MCAS_2013_8_29425","answerKey":"A"}
{"id":"MEAP_2005_5_15","answerKey":"B"}
{"id":"Mercury_7016258","answerKey":"B"}
{"id":"NCEOGA_2013_8_5","answerKey":"A"}
{"id":"Mercury_7015540","answerKey":"B"}
{"id":"Mercury_SC_414001","answerKey":"D"}
{"id":"Mercury_7017973","answerKey":"D"}
{"id":"Mercury_407097","answerKey":"C"}
{"id":"Mercury_SC_406794","answerKey":"C"}
{"id":"Mercury_7227710","answerKey":"D"}
{"id":"Mercury_SC_406710","answerKey":"C"}
{"id":"Mercury_401926","answerKey":"D"}
{"id":"MCAS_2014_5_15","answerKey":"D"}
{"id":"Mercury_LBS10151","answerKey":"A"}
{"id":"ACTAAP_2013_5_8","answerKey":"B"}
{"id":"Mercury_SC_407592","answerKey":"A"}
{"id":"TIMSS_1995_8_L6","answerKey":"D"}
{"id":"Mercury_7233398","answerKey":"C"}
{"id":"Mercury_407664","answerKey":"D"}
{"id":"Mercury_SC_408657","answerKey":"D"}
{"id":"Mercury_7142800","answerKey":"B"}
{"id":"Mercury_SC_410837","answerKey":"B"}
{"id":"Mercury_7154315","answerKey":"A"}
{"id":"Mercury_7239628","answerKey":"D"}
{"id":"Mercury_401241","answerKey":"A"}
{"id":"Mercury_SC_408251","answerKey":"A"}
{"id":"Mercury_7175893","answerKey":"D"}
{"id":"Mercury_7202843","answerKey":"A"}
{"id":"Mercury_7159023","answerKey":"C"}
{"id":"MDSA_2008_8_3","answerKey":"D"}
{"id":"Mercury_7218348","answerKey":"A"}
{"id":"Mercury_SC_406458","answerKey":"D"}
{"id":"LEAP_2007_4_10280","answerKey":"A"}
{"id":"Mercury_7216965","answerKey":"C"}
{"id":"NYSEDREGENTS_2010_8_42","answerKey":"2"}
{"id":"LEAP__7_10351","answerKey":"C"}
{"id":"Mercury_SC_400590","answerKey":"D"}
{"id":"Mercury_7086608","answerKey":"B"}
{"id":"Mercury_7187863","answerKey":"D"}
{"id":"Mercury_7120873","answerKey":"C"}
{"id":"Mercury_184730","answerKey":"B"}
{"id":"Mercury_SC_401265","answerKey":"D"}
{"id":"OHAT_2009_8_34","answerKey":"D"}
{"id":"Mercury_406639","answerKey":"B"}
{"id":"Mercury_7008610","answerKey":"D"}
{"id":"MCAS_2009_8_12","answerKey":"A"}
{"id":"MCAS_2005_8_12","answerKey":"C"}
{"id":"ACTAAP_2008_7_4","answerKey":"B"}
{"id":"NYSEDREGENTS_2008_4_3","answerKey":"D"}
{"id":"Mercury_SC_416181","answerKey":"B"}
{"id":"NYSEDREGENTS_2010_4_30","answerKey":"B"}
{"id":"Mercury_7025060","answerKey":"D"}
{"id":"Mercury_SC_402103","answerKey":"D"}
{"id":"VASoL_2009_5_37","answerKey":"C"}
{"id":"Mercury_SC_402981","answerKey":"D"}
{"id":"NYSEDREGENTS_2008_8_5","answerKey":"2"}
{"id":"MCAS_1998_4_13","answerKey":"B"}
{"id":"MDSA_2008_8_20","answerKey":"C"}
{"id":"Mercury_SC_400134","answerKey":"D"}
{"id":"Mercury_SC_LBS10265","answerKey":"C"}
{"id":"Mercury_7188580","answerKey":"A"}
{"id":"Mercury_402348","answerKey":"A"}
{"id":"Mercury_7030555","answerKey":"C"}
{"id":"Mercury_SC_415453","answerKey":"A"}
{"id":"Mercury_7074848","answerKey":"A"}
{"id":"Mercury_SC_400582","answerKey":"A"}
{"id":"Mercury_SC_401168","answerKey":"C"}
{"id":"Mercury_180828","answerKey":"C"}
{"id":"FCAT_2008_5_1","answerKey":"A"}
{"id":"TAKS_2009_5_25","answerKey":"C"}
{"id":"Mercury_SC_LBS10392","answerKey":"A"}
{"id":"Mercury_7212905","answerKey":"B"}
{"id":"Mercury_7212888","answerKey":"A"}
{"id":"MDSA_2007_8_42","answerKey":"D"}
{"id":"Mercury_SC_415534","answerKey":"B"}
{"id":"Mercury_7213413","answerKey":"D"}
{"id":"Mercury_7068635","answerKey":"C"}
{"id":"Mercury_417137","answerKey":"C"}
{"id":"Mercury_7268258","answerKey":"C"}
{"id":"NAEP_2005_4_S13+14","answerKey":"C"}
{"id":"Mercury_SC_406089","answerKey":"D"}
{"id":"Mercury_SC_400700","answerKey":"C"}
{"id":"Mercury_7223493","answerKey":"A"}
{"id":"Mercury_SC_405928","answerKey":"D"}
{"id":"MCAS_2009_5_6518","answerKey":"D"}
{"id":"MCAS_2006_9_1","answerKey":"B"}
{"id":"Mercury_7239383","answerKey":"B"}
{"id":"Mercury_SC_400130","answerKey":"B"}
{"id":"Mercury_401426","answerKey":"D"}
{"id":"MCAS_2010_8_12016","answerKey":"C"}
{"id":"Mercury_SC_400324","answerKey":"B"}
{"id":"Mercury_SC_LBS10662","answerKey":"C"}
{"id":"VASoL_2009_3_8","answerKey":"D"}
{"id":"Mercury_SC_401185","answerKey":"C"}
{"id":"NYSEDREGENTS_2015_8_29","answerKey":"3"}
{"id":"Mercury_7234378","answerKey":"D"}
{"id":"ACTAAP_2014_7_3","answerKey":"D"}
{"id":"MDSA_2008_8_27","answerKey":"D"}
{"id":"Mercury_7004725","answerKey":"A"}
{"id":"Mercury_405143","answerKey":"C"}
{"id":"MCAS_2003_8_7","answerKey":"A"}
{"id":"Mercury_SC_405341","answerKey":"A"}
{"id":"Mercury_7283833","answerKey":"B"}
{"id":"Mercury_7159303","answerKey":"A"}
{"id":"Mercury_406427","answerKey":"C"}
{"id":"Mercury_SC_414129","answerKey":"B"}
{"id":"Mercury_7108990","answerKey":"A"}
{"id":"Mercury_SC_407315","answerKey":"A"}
{"id":"Mercury_SC_408663","answerKey":"B"}
{"id":"MEA_2013_8_18","answerKey":"A"}
{"id":"Mercury_7111125","answerKey":"A"}
{"id":"LEAP_2009_8_10430","answerKey":"A"}
{"id":"Mercury_7165218","answerKey":"D"}
{"id":"MEA_2013_8_15","answerKey":"A"}
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/arc/data-challenge/question-answers.jsonl/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/arc/data-challenge/question-answers.jsonl",
"repo_id": "ContextualSP",
"token_count": 20152
}
| 234 |
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_1","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_10","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_2","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_3","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_4","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_5","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_6","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_7","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_8","score":0.5}
{"chain_id":"3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_9","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_1","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_10","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_2","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_3","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_4","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_5","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_6","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_7","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_8","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_9","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_1","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_10","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_2","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_3","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_4","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_5","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_6","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_7","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_8","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_9","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_1","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_10","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_2","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_3","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_4","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_5","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_6","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_7","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_8","score":0.5}
{"chain_id":"308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_9","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_1","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_10","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_2","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_3","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_4","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_5","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_6","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_7","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_8","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_9","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_1","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_10","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_2","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_3","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_4","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_5","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_6","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_7","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_8","score":0.5}
{"chain_id":"308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_9","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_1","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_10","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_2","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_3","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_4","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_5","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_6","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_7","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_8","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_9","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_1","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_10","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_2","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_3","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_4","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_5","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_6","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_7","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_8","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_9","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_1","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_10","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_2","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_3","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_4","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_5","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_6","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_7","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_8","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_9","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_1","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_10","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_2","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_3","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_4","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_5","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_6","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_7","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_8","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_9","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_1","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_10","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_2","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_3","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_4","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_5","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_6","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_7","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_8","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_9","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_1","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_10","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_2","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_3","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_4","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_5","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_6","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_7","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_8","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_9","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_1","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_10","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_2","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_3","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_4","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_5","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_6","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_7","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_8","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_9","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_1","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_10","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_2","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_3","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_4","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_5","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_6","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_7","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_8","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_9","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_1","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_10","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_2","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_3","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_4","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_5","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_6","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_7","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_8","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_9","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_1","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_10","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_2","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_3","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_4","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_5","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_6","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_7","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_8","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_9","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_1","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_10","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_2","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_3","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_4","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_5","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_6","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_7","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_8","score":0.5}
{"chain_id":"30X31N5D63PAUWOOLAJ8THKT52USAE_1_9","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_1","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_10","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_2","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_3","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_4","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_5","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_6","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_7","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_8","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_9","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_1","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_10","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_2","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_3","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_4","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_5","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_6","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_7","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_8","score":0.5}
{"chain_id":"30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_9","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_1","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_10","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_2","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_3","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_4","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_5","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_6","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_7","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_8","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_9","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_1","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_10","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_2","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_3","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_4","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_5","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_6","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_7","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_8","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_9","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_1","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_10","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_2","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_3","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_4","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_5","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_6","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_7","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_8","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY648IINQ_1_9","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_1","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_10","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_2","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_3","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_4","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_5","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_6","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_7","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_8","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_9","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_1","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_10","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_2","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_3","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_4","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_5","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_6","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_7","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_8","score":0.5}
{"chain_id":"317HQ483I7RSK1FHP2UZBLY6SUSINW_1_9","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_1","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_10","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_2","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_3","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_4","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_5","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_6","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_7","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_8","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_9","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_1","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_10","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_2","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_3","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_4","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_5","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_6","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_7","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_8","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_9","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_1","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_10","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_2","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_3","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_4","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_5","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_6","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_7","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_8","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_9","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_1","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_10","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_2","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_3","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_4","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_5","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_6","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_7","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_8","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_9","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_1","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_10","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_2","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_3","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_4","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_5","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_6","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_7","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_8","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_9","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_1","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_10","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_2","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_3","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_4","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_5","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_6","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_7","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_8","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_9","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_1","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_10","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_2","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_3","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_4","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_5","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_6","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_7","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_8","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_9","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_1","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_10","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_2","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_3","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_4","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_5","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_6","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_7","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_8","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_9","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_1","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_10","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_2","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_3","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_4","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_5","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_6","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_7","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_8","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_9","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_1","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_10","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_2","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_3","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_4","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_5","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_6","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_7","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_8","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_9","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_1","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_10","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_2","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_3","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_4","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_5","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_6","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_7","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_8","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_9","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_1","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_10","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_2","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_3","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_4","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_5","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_6","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_7","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_8","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_9","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_1","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_10","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_2","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_3","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_4","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_5","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_6","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_7","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_8","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_9","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_1","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_10","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_2","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_3","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_4","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_5","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_6","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_7","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_8","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_9","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_1","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_10","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_2","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_3","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_4","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_5","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_6","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_7","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_8","score":0.5}
{"chain_id":"31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_9","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_1","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_10","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_2","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_3","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_4","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_5","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_6","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_7","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_8","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_9","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_1","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_10","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_2","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_3","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_4","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_5","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_6","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_7","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_8","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_9","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_1","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_10","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_2","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_3","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_4","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_5","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_6","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_7","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_8","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD684SNJH_1_9","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_1","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_10","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_2","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_3","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_4","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_5","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_6","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_7","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_8","score":0.5}
{"chain_id":"31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8762LRB_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_9","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_1","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_10","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_2","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_3","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_4","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_5","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_6","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_7","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_8","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_9","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_1","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_10","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_2","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_3","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_4","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_5","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_6","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_7","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_8","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN883WF3A_1_9","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_1","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_10","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_2","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_3","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_4","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_5","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_6","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_7","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_8","score":0.5}
{"chain_id":"31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_9","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_1","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_10","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_2","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_3","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_4","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_5","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_6","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_7","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_8","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_9","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_1","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_10","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_2","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_3","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_4","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_5","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_6","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_7","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_8","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X24923MGJZ_1_9","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_1","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_10","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_2","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_3","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_4","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_5","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_6","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_7","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_8","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X2493HSJG1_1_9","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_1","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_10","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_2","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_3","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_4","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_5","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_6","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_7","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_8","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GOWGJT_1_9","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_1","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_10","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_2","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_3","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_4","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_5","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_6","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_7","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_8","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OIODRFH1_1_9","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_1","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_10","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_2","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_3","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_4","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_5","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_6","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_7","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_8","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK2EM076_1_9","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_1","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_10","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_2","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_3","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_4","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_5","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_6","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_7","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_8","score":0.5}
{"chain_id":"324G5B4FB37SAL6E55O49KCK72L07M_1_9","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_1","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_10","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_2","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_3","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_4","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_5","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_6","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_7","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_8","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_9","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_1","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_10","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_2","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_3","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_4","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_5","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_6","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_7","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_8","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_9","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_1","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_10","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_2","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_3","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_4","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_5","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_6","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_7","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_8","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRLGQURC_1_9","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_1","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_10","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_2","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_3","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_4","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_5","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_6","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_7","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_8","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33ETM9E_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44401NVVAO_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD44404J5VAT_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440DJTAV5_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440XJHAVD_1_9","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_1","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_10","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_2","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_3","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_4","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_5","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_6","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_7","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_8","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_9","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_1","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_10","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_2","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_3","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_4","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_5","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_6","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_7","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_8","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYTM3S4S4_1_9","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_1","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_10","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_2","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_3","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_4","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_5","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_6","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_7","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_8","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_9","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_1","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_10","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_2","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_3","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_4","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_5","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_6","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_7","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_8","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_9","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_1","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_10","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_2","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_3","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_4","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_5","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_6","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_7","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_8","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_9","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_1","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_10","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_2","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_3","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_4","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_5","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_6","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_7","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_8","score":0.5}
{"chain_id":"32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_9","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_1","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_10","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_2","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_3","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_4","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_5","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_6","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_7","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_8","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWN4KKB39_1_9","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_1","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_10","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_2","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_3","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_4","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_5","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_6","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_7","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_8","score":0.5}
{"chain_id":"32ZKVD547FMBTP8119I3GKWNODJ3B6_1_9","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_1","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_10","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_2","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_3","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_4","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_5","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_6","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_7","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_8","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_9","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_1","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_10","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_2","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_3","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_4","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_5","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_6","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_7","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_8","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_9","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_1","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_10","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_2","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_3","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_4","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_5","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_6","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_7","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_8","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_9","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_1","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_10","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_2","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_3","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_4","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_5","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_6","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_7","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_8","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_9","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_1","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_10","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_2","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_3","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_4","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_5","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_6","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_7","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_8","score":0.5}
{"chain_id":"333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_9","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_1","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_10","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_2","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_3","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_4","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_5","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_6","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_7","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_8","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW6IJ2YE_1_9","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_1","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_2","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_3","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_4","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_5","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_6","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPW88EY2N_1_7","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_1","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_10","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_2","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_3","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_4","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_5","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_6","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_7","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_8","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBUPDLVU_1_9","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_1","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_10","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_2","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_3","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_4","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_5","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_6","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_7","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_8","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_9","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_1","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_10","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_2","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_3","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_4","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_5","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_6","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_7","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_8","score":0.5}
{"chain_id":"33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_9","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_1","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_10","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_2","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_3","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_4","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_5","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_6","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_7","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_8","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS76U3LV_1_9","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_1","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_10","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_2","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_3","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_4","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_5","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_6","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_7","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_8","score":0.5}
{"chain_id":"33CKWXB73JJE6OCUC8BVMF4HL6311D_1_9","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_1","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_10","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_2","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_3","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_4","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_5","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_6","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_7","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_8","score":0.5}
{"chain_id":"33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_9","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_1","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_10","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_2","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_3","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_4","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_5","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_6","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_7","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_8","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXCE8BHP_1_9","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_1","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_10","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_2","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_3","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_4","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_5","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_6","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_7","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_8","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXEHNHBI_1_9","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_1","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_10","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_2","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_3","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_4","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_5","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_6","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_7","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_8","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXXO9XBHG_1_9","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_1","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_10","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_2","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_3","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_4","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_5","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_6","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_7","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_8","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_9","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_1","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_10","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_2","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_3","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_4","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_5","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_6","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_7","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_8","score":0.5}
{"chain_id":"33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_9","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_1","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_10","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_2","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_3","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_4","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_5","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_6","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_7","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_8","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_9","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_1","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_10","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_2","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_3","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_4","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_5","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_6","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_7","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_8","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_9","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_1","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_10","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_2","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_3","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_4","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_5","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_6","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_7","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_8","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_9","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_1","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_10","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_2","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_3","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_4","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_5","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_6","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_7","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_8","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_9","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_1","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_10","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_2","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_3","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_4","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_5","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_6","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_7","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_8","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_9","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_1","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_10","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_2","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_3","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_4","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_5","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_6","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_7","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_8","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_9","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_1","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_10","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_2","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_3","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_4","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_5","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_6","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_7","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_8","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N921MN3_1_9","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_1","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_10","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_2","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_3","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_4","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_5","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_6","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_7","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_8","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_9","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_1","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_10","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_2","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_3","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_4","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_5","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_6","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_7","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_8","score":0.5}
{"chain_id":"33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_9","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_1","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_10","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_2","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_3","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_4","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_5","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_6","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_7","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_8","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_9","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_1","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_10","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_2","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_3","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_4","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_5","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_6","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_7","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_8","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_9","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_1","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_10","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_2","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_3","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_4","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_5","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_6","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_7","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_8","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_9","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_1","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_10","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_2","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_3","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_4","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_5","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_6","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_7","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_8","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_9","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_1","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_10","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_2","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_3","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_4","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_5","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_6","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_7","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_8","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_9","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_1","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_10","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_2","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_3","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_4","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_5","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_6","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_7","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_8","score":0.5}
{"chain_id":"33PPO7FECVEJYPO408GWFGMCDAOIDF_1_9","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_1","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_10","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_2","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_3","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_4","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_5","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_6","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_7","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_8","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_9","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_1","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_10","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_2","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_3","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_4","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_5","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_6","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_7","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_8","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GU8SEVUV_1_9","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_1","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_10","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_2","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_3","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_4","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_5","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_6","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_7","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_8","score":0.5}
{"chain_id":"340UGXU9DY0A1XJQLA5445GUBHZVUX_1_9","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_1","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_10","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_2","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_3","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_4","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_5","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_6","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_7","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_8","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_9","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_1","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_10","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_2","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_3","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_4","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_5","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_6","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_7","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_8","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_9","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_1","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_10","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_2","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_3","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_4","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_5","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_6","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_7","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_8","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I05694VE_1_9","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_1","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_10","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_2","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_3","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_4","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_5","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_6","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_7","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_8","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWO7QIK_1_9","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_1","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_10","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_2","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_3","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_4","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_5","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_6","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_7","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_8","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QX114WJV_1_9","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_1","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_10","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_2","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_3","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_4","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_5","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_6","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_7","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_8","score":0.5}
{"chain_id":"34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_9","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_1","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_10","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_2","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_3","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_4","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_5","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_6","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_7","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_8","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL71M310H_1_9","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_1","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_10","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_2","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_3","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_4","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_5","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_6","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_7","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_8","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_9","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_1","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_10","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_2","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_3","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_4","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_5","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_6","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_7","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_8","score":0.5}
{"chain_id":"34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N01HLG_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2N09HLO_1_9","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_1","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_10","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_2","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_3","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_4","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_5","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_6","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_7","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_8","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YKB60CH_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHF3B691_1_9","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_1","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_10","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_2","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_3","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_4","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_5","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_6","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_7","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_8","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_9","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_1","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_10","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_2","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_3","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_4","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_5","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_6","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_7","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_8","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_9","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_1","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_10","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_2","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_3","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_4","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_5","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_6","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_7","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_8","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_9","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_1","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_10","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_2","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_3","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_4","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_5","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_6","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_7","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_8","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_9","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_1","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_10","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_2","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_3","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_4","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_5","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_6","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_7","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_8","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_9","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_1","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_10","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_2","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_3","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_4","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_5","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_6","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_7","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_8","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427HIT08_1_9","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_1","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_10","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_2","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_3","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_4","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_5","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_6","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_7","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_8","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG427UF0T2_1_9","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_1","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_10","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_2","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_3","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_4","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_5","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_6","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_7","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_8","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_9","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_1","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_10","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_2","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_3","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_4","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_5","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_6","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_7","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_8","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_9","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_1","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_10","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_2","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_3","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_4","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_5","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_6","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_7","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_8","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY4NLVX5_1_9","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_1","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_10","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_2","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_3","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_4","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_5","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_6","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_7","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_8","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFY5SGXVD_1_9","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_1","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_10","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_2","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_3","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_4","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_5","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_6","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_7","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_8","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_9","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_1","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_10","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_2","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_3","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_4","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_5","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_6","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_7","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_8","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_9","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_1","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_10","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_2","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_3","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_4","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_5","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_6","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_7","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_8","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_9","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_1","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_10","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_2","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_3","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_4","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_5","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_6","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_7","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_8","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_9","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_1","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_10","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_2","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_3","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_4","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_5","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_6","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_7","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_8","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18F4V0564_1_9","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_1","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_10","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_2","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_3","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_4","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_5","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_6","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_7","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_8","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FV1B65J_1_9","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_1","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_10","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_2","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_3","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_4","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_5","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_6","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_7","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_8","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_9","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_1","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_10","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_2","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_3","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_4","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_5","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_6","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_7","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_8","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_9","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_1","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_10","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_2","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_3","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_4","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_5","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_6","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_7","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_8","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_9","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_1","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_10","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_2","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_3","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_4","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_5","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_6","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_7","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_8","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_9","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_1","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_10","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_2","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_3","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_4","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_5","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_6","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_7","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_8","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_9","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_1","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_10","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_2","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_3","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_4","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_5","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_6","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_7","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_8","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPX2XBIFU_1_9","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_1","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_10","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_2","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_3","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_4","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_5","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_6","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_7","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_8","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4ISJG62_1_9","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_1","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_10","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_2","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_3","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_4","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_5","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_6","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_7","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_8","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_9","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_1","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_10","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_2","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_3","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_4","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_5","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_6","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_7","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_8","score":0.5}
{"chain_id":"36H9ULYP62TCRKM69WWMFH4X616JFI_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE34WMW2_1_9","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_1","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_10","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_2","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_3","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_4","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_5","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_6","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_7","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_8","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_9","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_1","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_10","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_2","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_3","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_4","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_5","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_6","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_7","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_8","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPTWPXH5_1_9","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_1","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_10","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_2","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_3","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_4","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_5","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_6","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_7","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_8","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_9","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_1","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_10","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_2","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_3","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_4","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_5","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_6","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_7","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_8","score":0.5}
{"chain_id":"36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_9","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_1","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_10","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_2","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_3","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_4","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_5","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_6","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_7","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_8","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_9","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_1","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_10","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_2","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_3","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_4","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_5","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_6","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_7","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_8","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_9","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_1","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_10","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_2","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_3","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_4","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_5","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_6","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_7","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_8","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W401V3ZHI_1_9","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_1","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_10","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_2","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_3","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_4","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_5","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_6","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_7","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_8","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_9","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_1","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_10","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_2","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_3","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_4","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_5","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_6","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_7","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_8","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_9","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_1","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_10","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_2","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_3","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_4","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_5","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_6","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_7","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_8","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_9","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_1","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_10","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_2","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_3","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_4","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_5","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_6","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_7","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_8","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_9","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_1","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_10","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_2","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_3","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_4","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_5","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_6","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_7","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_8","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_9","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_1","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_10","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_2","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_3","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_4","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_5","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_6","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_7","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_8","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P24ZTR4_1_9","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_1","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_10","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_2","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_3","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_4","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_5","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_6","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_7","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_8","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P44QTRX_1_9","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_1","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_10","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_2","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_3","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_4","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_5","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_6","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_7","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_8","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_9","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_1","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_10","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_2","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_3","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_4","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_5","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_6","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_7","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_8","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_9","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_1","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_10","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_2","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_3","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_4","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_5","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_6","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_7","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_8","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE665DAI1_1_9","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_1","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_10","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_2","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_3","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_4","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_5","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_6","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_7","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_8","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_9","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_1","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_10","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_2","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_3","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_4","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_5","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_6","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_7","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_8","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPT2PELB_1_9","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_1","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_10","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_2","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_3","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_4","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_5","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_6","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_7","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_8","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW76IZD6M_1_9","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_1","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_10","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_2","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_3","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_4","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_5","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_6","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_7","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_8","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_9","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_1","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_10","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_2","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_3","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_4","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_5","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_6","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_7","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_8","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW7IEPD6G_1_9","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_1","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_10","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_2","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_3","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_4","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_5","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_6","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_7","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_8","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_9","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_1","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_10","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_2","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_3","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_4","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_5","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_6","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_7","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_8","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_9","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_1","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_10","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_2","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_3","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_4","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_5","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_6","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_7","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_8","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_9","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_1","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_10","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_2","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_3","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_4","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_5","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_6","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_7","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_8","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQXCWS12_1_9","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_1","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_10","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_2","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_3","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_4","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_5","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_6","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_7","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_8","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_9","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_1","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_10","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_2","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_3","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_4","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_5","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_6","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_7","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_8","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_9","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_1","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_10","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_2","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_3","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_4","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_5","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_6","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_7","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_8","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_9","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_1","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_10","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_2","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_3","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_4","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_5","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_6","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_7","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_8","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_9","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_1","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_10","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_2","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_3","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_4","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_5","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_6","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_7","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_8","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_9","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_1","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_10","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_2","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_3","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_4","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_5","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_6","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_7","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_8","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_9","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_1","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_10","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_2","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_3","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_4","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_5","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_6","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_7","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_8","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A114V7L_1_9","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_1","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_10","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_2","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_3","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_4","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_5","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_6","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_7","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_8","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_9","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_1","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_10","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_2","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_3","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_4","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_5","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_6","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_7","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_8","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_9","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_1","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_10","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_2","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_3","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_4","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_5","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_6","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_7","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_8","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LS02CRL_1_9","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_1","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_10","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_2","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_3","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_4","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_5","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_6","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_7","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_8","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_9","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_1","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_10","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_2","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_3","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_4","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_5","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_6","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_7","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_8","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S86PDEUS_1_9","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_1","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_10","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_2","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_3","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_4","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_5","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_6","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_7","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_8","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S87MEUE4_1_9","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_1","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_10","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_2","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_3","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_4","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_5","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_6","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_7","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_8","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_9","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_1","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_10","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_2","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_3","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_4","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_5","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_6","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_7","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_8","score":0.5}
{"chain_id":"386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_9","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_1","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_10","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_2","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_3","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_4","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_5","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_6","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_7","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_8","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_9","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_1","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_10","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_2","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_3","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_4","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_5","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_6","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_7","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_8","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_9","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_1","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_10","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_2","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_3","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_4","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_5","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_6","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_7","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_8","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_9","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_1","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_10","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_2","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_3","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_4","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_5","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_6","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_7","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_8","score":0.5}
{"chain_id":"38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_9","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_1","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_10","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_2","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_3","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_4","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_5","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_6","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_7","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_8","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_9","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_1","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_10","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_2","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_3","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_4","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_5","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_6","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_7","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_8","score":0.5}
{"chain_id":"38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_9","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_1","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_10","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_2","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_3","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_4","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_5","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_6","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_7","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_8","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_9","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_1","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_10","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_2","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_3","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_4","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_5","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_6","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_7","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_8","score":0.5}
{"chain_id":"392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_9","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_1","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_10","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_2","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_3","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_4","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_5","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_6","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_7","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_8","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_9","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_1","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_10","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_2","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_3","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_4","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_5","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_6","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_7","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_8","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NAGQZEK_1_9","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_1","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_10","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_2","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_3","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_4","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_5","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_6","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_7","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_8","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_9","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_1","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_10","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_2","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_3","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_4","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_5","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_6","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_7","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_8","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCN8EZX_1_9","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_1","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_10","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_2","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_3","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_4","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_5","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_6","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_7","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_8","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_9","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_1","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_10","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_2","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_3","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_4","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_5","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_6","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_7","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_8","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_9","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_1","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_10","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_2","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_3","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_4","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_5","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_6","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_7","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_8","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_9","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_1","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_10","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_2","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_3","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_4","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_5","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_6","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_7","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_8","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_9","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_1","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_10","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_2","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_3","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_4","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_5","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_6","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_7","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_8","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_9","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_1","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_10","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_2","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_3","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_4","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_5","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_6","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_7","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_8","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO122VC0_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2H2VCV_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUODJCCV1_1_9","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_1","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_10","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_2","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_3","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_4","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_5","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_6","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_7","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_8","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_9","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_1","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_10","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_2","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_3","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_4","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_5","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_6","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_7","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_8","score":0.5}
{"chain_id":"39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_9","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_1","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_10","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_2","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_3","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_4","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_5","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_6","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_7","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_8","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_9","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_1","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_10","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_2","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_3","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_4","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_5","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_6","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_7","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_8","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_9","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_1","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_10","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_2","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_3","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_4","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_5","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_6","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_7","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_8","score":0.5}
{"chain_id":"39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_9","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_1","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_10","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_2","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_3","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_4","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_5","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_6","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_7","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_8","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_9","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_1","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_10","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_2","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_3","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_4","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_5","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_6","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_7","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_8","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_9","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_1","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_10","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_2","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_3","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_4","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_5","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_6","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_7","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_8","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MJJX832_1_9","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_1","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_10","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_2","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_3","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_4","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_5","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_6","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_7","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_8","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_9","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_1","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_10","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_2","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_3","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_4","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_5","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_6","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_7","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_8","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_9","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_1","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_10","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_2","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_3","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_4","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_5","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_6","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_7","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_8","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_9","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_1","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_10","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_2","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_3","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_4","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_5","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_6","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_7","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_8","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_9","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_1","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_10","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_2","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_3","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_4","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_5","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_6","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_7","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_8","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_9","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_1","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_10","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_2","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_3","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_4","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_5","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_6","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_7","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_8","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_9","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_1","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_10","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_2","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_3","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_4","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_5","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_6","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_7","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_8","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_9","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_1","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_10","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_2","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_3","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_4","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_5","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_6","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_7","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_8","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_9","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_1","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_10","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_2","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_3","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_4","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_5","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_6","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_7","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_8","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_9","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_1","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_10","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_2","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_3","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_4","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_5","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_6","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_7","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_8","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_9","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_1","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_10","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_2","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_3","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_4","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_5","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_6","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_7","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_8","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_9","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_1","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_10","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_2","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_3","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_4","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_5","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_6","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_7","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_8","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_9","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_1","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_10","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_2","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_3","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_4","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_5","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_6","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_7","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_8","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_9","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_1","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_10","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_2","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_3","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_4","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_5","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_6","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_7","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_8","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_9","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_1","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_10","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_2","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_3","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_4","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_5","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_6","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_7","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_8","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_9","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_1","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_10","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_2","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_3","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_4","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_5","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_6","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_7","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_8","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FVL1CHZ_1_9","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_1","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_10","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_2","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_3","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_4","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_5","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_6","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_7","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_8","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_9","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_1","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_10","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_2","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_3","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_4","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_5","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_6","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_7","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_8","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_9","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_1","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_10","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_2","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_3","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_4","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_5","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_6","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_7","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_8","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_9","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_1","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_10","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_2","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_3","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_4","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_5","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_6","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_7","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_8","score":0.5}
{"chain_id":"3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_9","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_1","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_10","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_2","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_3","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_4","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_5","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_6","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_7","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_8","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_9","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_1","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_10","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_2","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_3","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_4","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_5","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_6","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_7","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_8","score":0.5}
{"chain_id":"3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_9","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_1","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_10","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_2","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_3","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_4","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_5","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_6","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_7","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_8","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_9","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_1","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_10","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_2","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_3","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_4","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_5","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_6","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_7","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_8","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_9","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_1","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_10","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_2","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_3","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_4","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_5","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_6","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_7","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_8","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI93YAF_1_9","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_1","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_10","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_2","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_3","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_4","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_5","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_6","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_7","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_8","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_9","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_1","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_10","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_2","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_3","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_4","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_5","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_6","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_7","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_8","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP468MXIR_1_9","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_1","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_10","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_2","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_3","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_4","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_5","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_6","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_7","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_8","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_9","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_1","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_10","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_2","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_3","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_4","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_5","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_6","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_7","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_8","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_9","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_1","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_10","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_2","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_3","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_4","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_5","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_6","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_7","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_8","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304XIP305_1_9","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_1","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_10","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_2","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_3","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_4","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_5","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_6","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_7","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_8","score":0.5}
{"chain_id":"3AZHRG4CU4JA925R3TLEW304Z91301_1_9","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_1","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_10","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_2","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_3","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_4","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_5","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_6","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_7","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_8","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_9","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_1","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_10","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_2","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_3","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_4","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_5","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_6","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_7","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_8","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_9","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_1","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_10","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_2","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_3","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_4","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_5","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_6","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_7","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_8","score":0.5}
{"chain_id":"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_9","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_1","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_10","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_2","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_3","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_4","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_5","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_6","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_7","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_8","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_9","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_1","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_10","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_2","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_3","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_4","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_5","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_6","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_7","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_8","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_9","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_1","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_10","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_2","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_3","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_4","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_5","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_6","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_7","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_8","score":0.5}
{"chain_id":"3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_9","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_1","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_10","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_2","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_3","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_4","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_5","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_6","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_7","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_8","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7931S5SRD_1_9","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_1","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_10","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_2","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_3","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_4","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_5","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_6","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_7","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_8","score":0.5}
{"chain_id":"3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_9","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_1","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_10","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_2","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_3","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_4","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_5","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_6","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_7","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_8","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_9","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_1","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_10","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_2","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_3","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_4","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_5","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_6","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_7","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_8","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_9","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_1","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_10","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_2","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_3","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_4","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_5","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_6","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_7","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_8","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_9","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_1","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_10","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_2","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_3","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_4","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_5","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_6","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_7","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_8","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_9","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_1","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_10","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_2","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_3","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_4","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_5","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_6","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_7","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_8","score":0.5}
{"chain_id":"3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_9","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_1","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_10","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_2","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_3","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_4","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_5","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_6","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_7","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_8","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_9","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_1","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_10","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_2","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_3","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_4","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_5","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_6","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_7","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_8","score":0.5}
{"chain_id":"3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_9","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_1","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_10","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_2","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_3","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_4","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_5","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_6","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_7","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_8","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J4EM991_1_9","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_1","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_10","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_2","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_3","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_4","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_5","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_6","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_7","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_8","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_9","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_1","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_10","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_2","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_3","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_4","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_5","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_6","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_7","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_8","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_9","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_1","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_10","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_2","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_3","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_4","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_5","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_6","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_7","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_8","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_9","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_1","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_10","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_2","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_3","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_4","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_5","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_6","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_7","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_8","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_9","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_1","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_10","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_2","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_3","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_4","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_5","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_6","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_7","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_8","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_9","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_1","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_10","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_2","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_3","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_4","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_5","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_6","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_7","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_8","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_9","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_1","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_10","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_2","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_3","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_4","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_5","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_6","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_7","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_8","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_9","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_1","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_10","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_2","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_3","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_4","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_5","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_6","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_7","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_8","score":0.5}
{"chain_id":"3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_9","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_1","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_10","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_2","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_3","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_4","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_5","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_6","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_7","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_8","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_9","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_1","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_10","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_2","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_3","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_4","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_5","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_6","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_7","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_8","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_9","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_1","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_10","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_2","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_3","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_4","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_5","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_6","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_7","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_8","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_9","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_1","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_10","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_2","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_3","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_4","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_5","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_6","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_7","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_8","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_9","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_1","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_10","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_2","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_3","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_4","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_5","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_6","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_7","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_8","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_9","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_1","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_10","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_2","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_3","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_4","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_5","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_6","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_7","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_8","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_9","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_1","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_10","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_2","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_3","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_4","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_5","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_6","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_7","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_8","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_9","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_1","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_10","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_2","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_3","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_4","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_5","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_6","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_7","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_8","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_9","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_1","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_10","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_2","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_3","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_4","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_5","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_6","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_7","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_8","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_9","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_1","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_10","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_2","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_3","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_4","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_5","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_6","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_7","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_8","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_9","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_1","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_10","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_2","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_3","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_4","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_5","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_6","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_7","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_8","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_9","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_1","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_10","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_2","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_3","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_4","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_5","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_6","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_7","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_8","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_9","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_1","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_10","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_2","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_3","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_4","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_5","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_6","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_7","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_8","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_9","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_1","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_10","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_2","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_3","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_4","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_5","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_6","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_7","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_8","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_9","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_1","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_10","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_2","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_3","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_4","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_5","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_6","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_7","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_8","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG216J73_1_9","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_1","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_10","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_2","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_3","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_4","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_5","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_6","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_7","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_8","score":0.5}
{"chain_id":"3CTOC39K37PZCR70RDYARPRG690J7H_1_9","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_1","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_10","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_2","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_3","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_4","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_5","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_6","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_7","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_8","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_9","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_1","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_10","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_2","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_3","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_4","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_5","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_6","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_7","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_8","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_9","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_1","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_10","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_2","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_3","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_4","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_5","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_6","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_7","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_8","score":0.5}
{"chain_id":"3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_9","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_1","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_10","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_2","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_3","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_4","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_5","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_6","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_7","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_8","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_9","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_1","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_10","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_2","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_3","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_4","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_5","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_6","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_7","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_8","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_9","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_1","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_10","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_2","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_3","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_4","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_5","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_6","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_7","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_8","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_9","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_1","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_10","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_2","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_3","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_4","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_5","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_6","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_7","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_8","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_9","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_1","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_10","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_2","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_3","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_4","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_5","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_6","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_7","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_8","score":0.5}
{"chain_id":"3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_9","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_1","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_10","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_2","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_3","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_4","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_5","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_6","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_7","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_8","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV62DBACE6_1_9","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_1","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_10","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_2","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_3","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_4","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_5","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_6","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_7","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_8","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921R4F647_1_9","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_1","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_10","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_2","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_3","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_4","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_5","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_6","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_7","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_8","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_9","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_1","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_10","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_2","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_3","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_4","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_5","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_6","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_7","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_8","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_9","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_1","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_10","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_2","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_3","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_4","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_5","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_6","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_7","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_8","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_9","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_1","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_10","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_2","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_3","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_4","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_5","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_6","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_7","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_8","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_9","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_1","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_10","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_2","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_3","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_4","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_5","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_6","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_7","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_8","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_9","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_1","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_10","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_2","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_3","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_4","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_5","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_6","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_7","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_8","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_9","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_1","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_10","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_2","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_3","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_4","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_5","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_6","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_7","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_8","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_9","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_1","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_10","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_2","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_3","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_4","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_5","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_6","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_7","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_8","score":0.5}
{"chain_id":"3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_9","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_1","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_10","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_2","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_3","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_4","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_5","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_6","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_7","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_8","score":0.5}
{"chain_id":"3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_9","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_1","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_10","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_2","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_3","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_4","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_5","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_6","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_7","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_8","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_9","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_1","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_10","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_2","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_3","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_4","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_5","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_6","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_7","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_8","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_9","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_1","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_10","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_2","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_3","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_4","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_5","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_6","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_7","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_8","score":0.5}
{"chain_id":"3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_9","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_1","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_10","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_2","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_3","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_4","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_5","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_6","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_7","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_8","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_9","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_1","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_10","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_2","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_3","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_4","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_5","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_6","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_7","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_8","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_9","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_1","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_10","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_2","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_3","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_4","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_5","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_6","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_7","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_8","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_9","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_1","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_10","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_2","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_3","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_4","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_5","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_6","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_7","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_8","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_9","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_1","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_10","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_2","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_3","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_4","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_5","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_6","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_7","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_8","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_9","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_1","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_10","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_2","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_3","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_4","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_5","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_6","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_7","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_8","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_9","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_1","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_10","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_2","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_3","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_4","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_5","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_6","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_7","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_8","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_9","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_1","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_10","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_2","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_3","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_4","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_5","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_6","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_7","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_8","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_9","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_1","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_10","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_2","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_3","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_4","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_5","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_6","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_7","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_8","score":0.5}
{"chain_id":"3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_9","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_1","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_10","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_2","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_3","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_4","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_5","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_6","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_7","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_8","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_9","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_1","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_10","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_2","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_3","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_4","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_5","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_6","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_7","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_8","score":0.5}
{"chain_id":"3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_9","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_1","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_10","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_2","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_3","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_4","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_5","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_6","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_7","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_8","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_9","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_1","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_10","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_2","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_3","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_4","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_5","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_6","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_7","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_8","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_9","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_1","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_10","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_2","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_3","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_4","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_5","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_6","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_7","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_8","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_9","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_1","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_10","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_2","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_3","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_4","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_5","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_6","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_7","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_8","score":0.5}
{"chain_id":"3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_9","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_1","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_10","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_2","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_3","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_4","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_5","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_6","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_7","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_8","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_9","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_1","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_10","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_2","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_3","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_4","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_5","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_6","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_7","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_8","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZECOK93_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_9","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_1","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_10","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_2","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_3","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_4","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_5","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_6","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_7","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_8","score":0.5}
{"chain_id":"3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_9","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_1","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_10","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_2","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_3","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_4","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_5","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_6","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_7","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_8","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_9","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_1","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_10","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_2","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_3","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_4","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_5","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_6","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_7","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_8","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_9","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_1","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_10","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_2","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_3","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_4","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_5","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_6","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_7","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_8","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_9","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_1","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_10","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_2","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_3","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_4","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_5","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_6","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_7","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_8","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_9","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_1","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_10","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_2","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_3","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_4","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_5","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_6","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_7","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_8","score":0.5}
{"chain_id":"3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_9","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_1","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_10","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_2","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_3","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_4","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_5","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_6","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_7","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_8","score":0.5}
{"chain_id":"3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_9","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_1","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_10","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_2","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_3","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_4","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_5","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_6","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_7","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_8","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_9","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_1","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_10","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_2","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_3","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_4","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_5","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_6","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_7","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_8","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_9","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_1","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_10","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_2","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_3","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_4","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_5","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_6","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_7","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_8","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_9","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_1","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_10","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_2","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_3","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_4","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_5","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_6","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_7","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_8","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_9","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_1","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_10","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_2","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_3","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_4","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_5","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_6","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_7","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_8","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ99O2PFK_1_9","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_1","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_10","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_2","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_3","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_4","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_5","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_6","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_7","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_8","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_9","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_1","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_10","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_2","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_3","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_4","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_5","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_6","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_7","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_8","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_9","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_1","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_10","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_2","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_3","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_4","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_5","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_6","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_7","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_8","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_9","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_1","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_10","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_2","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_3","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_4","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_5","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_6","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_7","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_8","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_9","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_1","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_10","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_2","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_3","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_4","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_5","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_6","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_7","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_8","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_9","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_1","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_10","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_2","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_3","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_4","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_5","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_6","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_7","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_8","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_9","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_1","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_10","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_2","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_3","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_4","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_5","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_6","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_7","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_8","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_9","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_1","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_10","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_2","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_3","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_4","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_5","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_6","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_7","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_8","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_9","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_1","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_10","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_2","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_3","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_4","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_5","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_6","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_7","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_8","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_9","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_1","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_10","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_2","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_3","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_4","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_5","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_6","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_7","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_8","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_9","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_1","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_10","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_2","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_3","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_4","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_5","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_6","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_7","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_8","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_9","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_1","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_10","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_2","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_3","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_4","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_5","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_6","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_7","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_8","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_9","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_1","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_10","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_2","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_3","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_4","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_5","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_6","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_7","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_8","score":0.5}
{"chain_id":"3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_9","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_1","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_10","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_2","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_3","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_4","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_5","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_6","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_7","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_8","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_9","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_1","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_10","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_2","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_3","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_4","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_5","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_6","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_7","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_8","score":0.5}
{"chain_id":"3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_9","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_1","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_10","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_2","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_3","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_4","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_5","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_6","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_7","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_8","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_9","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_1","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_10","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_2","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_3","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_4","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_5","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_6","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_7","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_8","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_9","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_1","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_10","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_2","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_3","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_4","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_5","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_6","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_7","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_8","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_9","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_1","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_10","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_2","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_3","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_4","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_5","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_6","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_7","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_8","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_9","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_1","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_10","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_2","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_3","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_4","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_5","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_6","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_7","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_8","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBIK876N_1_9","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_1","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_10","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_2","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_3","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_4","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_5","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_6","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_7","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_8","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_9","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_1","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_10","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_2","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_3","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_4","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_5","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_6","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_7","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_8","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_9","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_1","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_10","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_2","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_3","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_4","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_5","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_6","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_7","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_8","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_9","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_1","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_10","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_2","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_3","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_4","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_5","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_6","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_7","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_8","score":0.5}
{"chain_id":"3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_9","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_1","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_10","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_2","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_3","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_4","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_5","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_6","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_7","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_8","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_9","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_1","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_10","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_2","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_3","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_4","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_5","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_6","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_7","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_8","score":0.5}
{"chain_id":"3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_9","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_1","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_10","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_2","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_3","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_4","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_5","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_6","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_7","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_8","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSWZTM82_1_9","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_1","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_10","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_2","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_3","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_4","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_5","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_6","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_7","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_8","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_9","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_1","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_10","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_2","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_3","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_4","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_5","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_6","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_7","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_8","score":0.5}
{"chain_id":"3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_9","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_1","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_10","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_2","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_3","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_4","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_5","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_6","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_7","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_8","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_9","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_1","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_10","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_2","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_3","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_4","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_5","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_6","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_7","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_8","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_9","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_1","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_10","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_2","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_3","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_4","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_5","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_6","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_7","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_8","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_9","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_1","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_10","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_2","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_3","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_4","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_5","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_6","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_7","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_8","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_9","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_1","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_10","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_2","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_3","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_4","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_5","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_6","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_7","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_8","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_9","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_1","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_10","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_2","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_3","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_4","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_5","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_6","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_7","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_8","score":0.5}
{"chain_id":"3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_9","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_1","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_10","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_2","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_3","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_4","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_5","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_6","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_7","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_8","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_9","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_1","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_10","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_2","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_3","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_4","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_5","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_6","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_7","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_8","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU31KF9C_1_9","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_1","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_10","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_2","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_3","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_4","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_5","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_6","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_7","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_8","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_9","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_1","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_10","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_2","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_3","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_4","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_5","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_6","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_7","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_8","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_9","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_1","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_10","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_2","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_3","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_4","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_5","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_6","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_7","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_8","score":0.5}
{"chain_id":"3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A1O10MB_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3I5M0R_1_9","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_1","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_10","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_2","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_3","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_4","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_5","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_6","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_7","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_8","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUR37O21_1_9","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_1","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_10","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_2","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_3","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_4","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_5","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_6","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_7","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_8","score":0.5}
{"chain_id":"3HUTX6F6VUM6R11R1E9K3URUW242OF_1_9","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_1","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_10","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_2","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_3","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_4","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_5","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_6","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_7","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_8","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_9","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_1","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_10","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_2","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_3","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_4","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_5","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_6","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_7","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_8","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_9","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_1","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_10","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_2","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_3","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_4","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_5","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_6","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_7","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_8","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_9","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_1","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_10","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_2","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_3","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_4","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_5","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_6","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_7","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_8","score":0.5}
{"chain_id":"3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_9","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_1","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_10","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_2","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_3","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_4","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_5","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_6","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_7","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_8","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKN8ISEE_1_9","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_1","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_10","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_2","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_3","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_4","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_5","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_6","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_7","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_8","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKP7IES0_1_9","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_1","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_10","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_2","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_3","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_4","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_5","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_6","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_7","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_8","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG04N2F5_1_9","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_1","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_10","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_2","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_3","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_4","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_5","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_6","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_7","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_8","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IG0R4F29_1_9","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_1","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_10","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_2","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_3","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_4","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_5","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_6","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_7","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_8","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCLSBUPI_1_9","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_1","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_10","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_2","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_3","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_4","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_5","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_6","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_7","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_8","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM5LPUE_1_9","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_1","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_10","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_2","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_3","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_4","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_5","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_6","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_7","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_8","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCM9TPUU_1_9","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_1","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_10","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_2","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_3","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_4","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_5","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_6","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_7","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_8","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_9","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_1","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_10","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_2","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_3","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_4","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_5","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_6","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_7","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_8","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_9","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_1","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_10","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_2","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_3","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_4","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_5","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_6","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_7","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_8","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_9","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_1","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_10","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_2","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_3","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_4","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_5","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_6","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_7","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_8","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_9","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_1","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_10","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_2","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_3","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_4","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_5","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_6","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_7","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_8","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_9","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_1","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_10","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_2","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_3","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_4","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_5","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_6","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_7","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_8","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_9","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_1","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_10","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_2","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_3","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_4","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_5","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_6","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_7","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_8","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_9","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_1","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_10","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_2","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_3","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_4","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_5","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_6","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_7","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_8","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_9","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_1","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_10","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_2","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_3","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_4","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_5","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_6","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_7","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_8","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_9","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_1","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_10","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_2","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_3","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_4","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_5","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_6","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_7","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_8","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_9","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_1","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_10","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_2","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_3","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_4","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_5","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_6","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_7","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_8","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS36HONS_1_9","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_1","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_10","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_2","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_3","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_4","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_5","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_6","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_7","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_8","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_9","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_1","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_10","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_2","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_3","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_4","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_5","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_6","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_7","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_8","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_9","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_1","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_10","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_2","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_3","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_4","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_5","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_6","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_7","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_8","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_9","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_1","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_10","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_2","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_3","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_4","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_5","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_6","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_7","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_8","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_9","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_1","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_10","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_2","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_3","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_4","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_5","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_6","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_7","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_8","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_9","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_1","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_10","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_2","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_3","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_4","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_5","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_6","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_7","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_8","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_9","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_1","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_10","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_2","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_3","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_4","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_5","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_6","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_7","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_8","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_9","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_1","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_10","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_2","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_3","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_4","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_5","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_6","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_7","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_8","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_9","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_1","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_10","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_2","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_3","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_4","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_5","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_6","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_7","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_8","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_9","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_1","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_10","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_2","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_3","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_4","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_5","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_6","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_7","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_8","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_9","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_1","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_10","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_2","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_3","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_4","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_5","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_6","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_7","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_8","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_9","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_1","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_10","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_2","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_3","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_4","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_5","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_6","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_7","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_8","score":0.5}
{"chain_id":"3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_9","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_1","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_10","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_2","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_3","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_4","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_5","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_6","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_7","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_8","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET5T0T68_1_9","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_1","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_10","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_2","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_3","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_4","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_5","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_6","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_7","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_8","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET6G56T1_1_9","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_1","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_10","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_2","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_3","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_4","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_5","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_6","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_7","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_8","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET8NU6T6_1_9","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_1","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_10","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_2","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_3","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_4","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_5","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_6","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_7","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_8","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_9","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_1","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_10","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_2","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_3","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_4","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_5","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_6","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_7","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_8","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_9","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_1","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_10","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_2","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_3","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_4","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_5","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_6","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_7","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_8","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36COE4606K_1_9","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_1","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_10","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_2","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_3","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_4","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_5","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_6","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_7","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_8","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_9","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_1","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_10","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_2","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_3","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_4","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_5","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_6","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_7","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_8","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_9","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_1","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_10","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_2","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_3","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_4","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_5","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_6","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_7","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_8","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_9","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_1","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_10","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_2","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_3","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_4","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_5","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_6","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_7","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_8","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_9","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_1","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_10","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_2","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_3","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_4","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_5","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_6","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_7","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_8","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RN6OPZS_1_9","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_1","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_10","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_2","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_3","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_4","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_5","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_6","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_7","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_8","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_9","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_1","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_10","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_2","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_3","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_4","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_5","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_6","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_7","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_8","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_9","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_1","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_10","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_2","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_3","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_4","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_5","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_6","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_7","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_8","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_9","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_1","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_10","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_2","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_3","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_4","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_5","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_6","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_7","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_8","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_9","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_1","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_10","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_2","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_3","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_4","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_5","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_6","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_7","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_8","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_9","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_1","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_10","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_2","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_3","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_4","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_5","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_6","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_7","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_8","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_9","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_1","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_10","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_2","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_3","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_4","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_5","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_6","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_7","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_8","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_9","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_1","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_10","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_2","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_3","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_4","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_5","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_6","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_7","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_8","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_9","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_1","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_10","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_2","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_3","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_4","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_5","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_6","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_7","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_8","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_9","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_1","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_10","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_2","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_3","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_4","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_5","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_6","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_7","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_8","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_9","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_1","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_10","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_2","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_3","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_4","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_5","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_6","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_7","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_8","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_9","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_1","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_10","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_2","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_3","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_4","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_5","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_6","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_7","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_8","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_9","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_1","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_10","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_2","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_3","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_4","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_5","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_6","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_7","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_8","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_9","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_1","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_10","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_2","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_3","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_4","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_5","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_6","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_7","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_8","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_9","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_1","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_10","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_2","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_3","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_4","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_5","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_6","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_7","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_8","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_9","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_1","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_10","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_2","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_3","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_4","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_5","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_6","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_7","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_8","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_9","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_1","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_10","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_2","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_3","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_4","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_5","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_6","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_7","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_8","score":0.5}
{"chain_id":"3K4J6M3CXES74RFXQAPR431QHPNAGV_1_9","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_1","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_10","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_2","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_3","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_4","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_5","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_6","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_7","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_8","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_9","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_1","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_10","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_2","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_3","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_4","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_5","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_6","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_7","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_8","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_9","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_1","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_10","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_2","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_3","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_4","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_5","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_6","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_7","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_8","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_9","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_1","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_10","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_2","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_3","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_4","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_5","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_6","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_7","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_8","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_9","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_1","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_10","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_2","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_3","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_4","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_5","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_6","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_7","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_8","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_9","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_1","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_10","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_2","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_3","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_4","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_5","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_6","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_7","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_8","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_9","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_1","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_10","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_2","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_3","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_4","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_5","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_6","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_7","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_8","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_9","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_1","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_10","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_2","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_3","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_4","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_5","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_6","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_7","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_8","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_9","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_1","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_10","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_2","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_3","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_4","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_5","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_6","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_7","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_8","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_9","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_1","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_10","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_2","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_3","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_4","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_5","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_6","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_7","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_8","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_9","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_1","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_10","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_2","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_3","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_4","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_5","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_6","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_7","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_8","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_9","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_1","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_10","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_2","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_3","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_4","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_5","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_6","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_7","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_8","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_9","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_1","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_10","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_2","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_3","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_4","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_5","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_6","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_7","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_8","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJD742U5_1_9","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_1","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_10","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_2","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_3","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_4","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_5","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_6","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_7","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_8","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_9","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_1","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_10","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_2","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_3","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_4","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_5","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_6","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_7","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_8","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_9","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_1","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_10","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_2","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_3","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_4","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_5","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_6","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_7","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_8","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3FVY495_1_9","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_1","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_10","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_2","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_3","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_4","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_5","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_6","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_7","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_8","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_9","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_1","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_10","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_2","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_3","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_4","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_5","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_6","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_7","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_8","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_9","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_1","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_10","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_2","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_3","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_4","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_5","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_6","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_7","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_8","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_9","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_1","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_10","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_2","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_3","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_4","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_5","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_6","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_7","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_8","score":0.5}
{"chain_id":"3KOPY89HM81HB86DP1VKE8F03NC3JS_1_9","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_1","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_10","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_2","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_3","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_4","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_5","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_6","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_7","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_8","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_9","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_1","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_10","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_2","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_3","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_4","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_5","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_6","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_7","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_8","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_9","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_1","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_10","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_2","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_3","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_4","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_5","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_6","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_7","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_8","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCII05LV_1_9","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_1","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_10","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_2","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_3","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_4","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_5","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_6","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_7","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_8","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_9","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_1","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_10","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_2","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_3","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_4","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_5","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_6","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_7","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_8","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_9","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_1","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_10","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_2","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_3","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_4","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_5","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_6","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_7","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_8","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_9","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_1","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_10","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_2","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_3","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_4","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_5","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_6","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_7","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_8","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_9","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_1","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_10","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_2","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_3","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_4","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_5","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_6","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_7","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_8","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_9","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_1","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_10","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_2","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_3","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_4","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_5","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_6","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_7","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_8","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_9","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_1","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_10","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_2","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_3","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_4","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_5","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_6","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_7","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_8","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_9","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_1","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_10","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_2","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_3","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_4","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_5","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_6","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_7","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_8","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_9","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_1","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_10","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_2","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_3","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_4","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_5","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_6","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_7","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_8","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT46GADC_1_9","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_1","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_10","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_2","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_3","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_4","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_5","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_6","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_7","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_8","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_9","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_1","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_10","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_2","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_3","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_4","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_5","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_6","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_7","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_8","score":0.5}
{"chain_id":"3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_9","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_1","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_10","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_2","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_3","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_4","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_5","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_6","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_7","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_8","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_9","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_1","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_10","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_2","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_3","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_4","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_5","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_6","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_7","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_8","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_9","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_1","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_10","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_2","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_3","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_4","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_5","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_6","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_7","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_8","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_9","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_1","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_10","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_2","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_3","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_4","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_5","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_6","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_7","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_8","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_9","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_1","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_10","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_2","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_3","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_4","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_5","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_6","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_7","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_8","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_9","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_1","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_10","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_2","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_3","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_4","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_5","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_6","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_7","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_8","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_9","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_1","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_10","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_2","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_3","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_4","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_5","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_6","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_7","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_8","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_9","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_1","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_10","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_2","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_3","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_4","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_5","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_6","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_7","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_8","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_9","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_1","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_10","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_2","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_3","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_4","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_5","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_6","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_7","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_8","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_9","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_1","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_10","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_2","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_3","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_4","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_5","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_6","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_7","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_8","score":0.5}
{"chain_id":"3LPW2N6LKT1T334BFJNR07MVUN95UW_1_9","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_1","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_10","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_2","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_3","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_4","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_5","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_6","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_7","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_8","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_9","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_1","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_10","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_2","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_3","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_4","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_5","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_6","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_7","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_8","score":0.5}
{"chain_id":"3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_9","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_1","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_10","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_2","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_3","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_4","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_5","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_6","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_7","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_8","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_9","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_1","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_10","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_2","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_3","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_4","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_5","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_6","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_7","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_8","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_9","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_1","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_10","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_2","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_3","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_4","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_5","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_6","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_7","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_8","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_9","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_1","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_10","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_2","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_3","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_4","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_5","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_6","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_7","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_8","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_9","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_1","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_10","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_2","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_3","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_4","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_5","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_6","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_7","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_8","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_9","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_1","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_10","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_2","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_3","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_4","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_5","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_6","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_7","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_8","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_9","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_1","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_10","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_2","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_3","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_4","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_5","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_6","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_7","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_8","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_9","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_1","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_10","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_2","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_3","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_4","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_5","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_6","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_7","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_8","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_9","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_1","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_10","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_2","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_3","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_4","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_5","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_6","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_7","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_8","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_9","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_1","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_10","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_2","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_3","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_4","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_5","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_6","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_7","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_8","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_9","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_1","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_10","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_2","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_3","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_4","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_5","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_6","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_7","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_8","score":0.5}
{"chain_id":"3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_9","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_1","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_10","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_2","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_3","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_4","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_5","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_6","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_7","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_8","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBNG06RR_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_9","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_1","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_10","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_2","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_3","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_4","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_5","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_6","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_7","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_8","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_9","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_1","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_10","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_2","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_3","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_4","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_5","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_6","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_7","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_8","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_9","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_1","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_10","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_2","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_3","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_4","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_5","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_6","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_7","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_8","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_9","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_1","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_10","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_2","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_3","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_4","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_5","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_6","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_7","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_8","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_9","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_1","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_10","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_2","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_3","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_4","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_5","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_6","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_7","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_8","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_9","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_1","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_10","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_2","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_3","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_4","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_5","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_6","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_7","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_8","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4725DL0_1_9","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_1","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_10","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_2","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_3","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_4","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_5","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_6","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_7","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_8","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_9","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_1","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_10","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_2","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_3","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_4","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_5","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_6","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_7","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_8","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_9","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_1","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_10","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_2","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_3","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_4","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_5","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_6","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_7","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_8","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_9","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_1","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_10","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_2","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_3","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_4","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_5","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_6","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_7","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_8","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_9","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_1","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_10","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_2","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_3","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_4","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_5","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_6","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_7","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_8","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_9","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_1","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_10","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_2","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_3","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_4","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_5","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_6","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_7","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_8","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_9","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_1","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_10","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_2","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_3","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_4","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_5","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_6","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_7","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_8","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_9","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_1","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_10","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_2","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_3","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_4","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_5","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_6","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_7","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_8","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_9","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_1","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_10","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_2","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_3","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_4","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_5","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_6","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_7","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_8","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_9","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_1","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_10","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_2","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_3","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_4","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_5","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_6","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_7","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_8","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_9","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_1","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_10","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_2","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_3","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_4","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_5","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_6","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_7","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_8","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_9","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_1","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_10","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_2","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_3","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_4","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_5","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_6","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_7","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_8","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_9","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_1","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_10","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_2","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_3","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_4","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_5","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_6","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_7","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_8","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_9","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_1","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_10","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_2","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_3","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_4","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_5","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_6","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_7","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_8","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_9","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_1","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_10","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_2","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_3","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_4","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_5","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_6","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_7","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_8","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_9","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_1","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_10","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_2","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_3","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_4","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_5","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_6","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_7","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_8","score":0.5}
{"chain_id":"3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_9","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_1","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_10","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_2","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_3","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_4","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_5","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_6","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_7","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_8","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_9","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_1","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_10","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_2","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_3","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_4","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_5","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_6","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_7","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_8","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_9","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_1","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_10","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_2","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_3","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_4","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_5","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_6","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_7","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_8","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_9","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_1","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_10","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_2","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_3","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_4","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_5","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_6","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_7","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_8","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_9","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_1","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_10","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_2","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_3","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_4","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_5","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_6","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_7","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_8","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM7522YVI_1_9","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_1","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_10","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_2","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_3","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_4","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_5","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_6","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_7","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_8","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_9","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_1","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_10","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_2","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_3","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_4","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_5","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_6","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_7","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_8","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_9","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_1","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_10","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_2","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_3","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_4","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_5","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_6","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_7","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_8","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_9","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_1","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_10","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_2","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_3","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_4","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_5","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_6","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_7","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_8","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVY892HY5_1_9","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_1","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_10","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_2","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_3","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_4","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_5","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_6","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_7","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_8","score":0.5}
{"chain_id":"3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_9","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_1","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_10","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_2","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_3","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_4","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_5","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_6","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_7","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_8","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_9","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_1","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_10","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_2","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_3","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_4","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_5","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_6","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_7","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_8","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_9","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_1","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_10","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_2","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_3","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_4","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_5","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_6","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_7","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_8","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_9","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_1","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_10","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_2","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_3","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_4","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_5","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_6","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_7","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_8","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_9","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_1","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_10","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_2","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_3","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_4","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_5","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_6","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_7","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_8","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_9","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_1","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_10","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_2","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_3","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_4","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_5","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_6","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_7","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_8","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_9","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_1","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_10","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_2","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_3","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_4","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_5","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_6","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_7","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_8","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_9","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_1","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_10","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_2","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_3","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_4","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_5","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_6","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_7","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_8","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EEAF4WK_1_9","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_1","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_10","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_2","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_3","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_4","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_5","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_6","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_7","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_8","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EH8SW4O_1_9","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_1","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_10","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_2","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_3","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_4","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_5","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_6","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_7","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_8","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31ESOD4WO_1_9","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_1","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_10","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_2","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_3","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_4","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_5","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_6","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_7","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_8","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_9","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_1","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_10","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_2","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_3","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_4","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_5","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_6","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_7","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_8","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_9","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_1","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_10","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_2","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_3","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_4","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_5","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_6","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_7","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_8","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_9","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_1","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_10","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_2","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_3","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_4","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_5","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_6","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_7","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_8","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_9","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_1","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_10","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_2","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_3","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_4","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_5","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_6","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_7","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_8","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_9","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_1","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_10","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_2","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_3","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_4","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_5","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_6","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_7","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_8","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL963554D0PT_1_9","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_1","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_10","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_2","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_3","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_4","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_5","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_6","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_7","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_8","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_9","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_1","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_10","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_2","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_3","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_4","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_5","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_6","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_7","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_8","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_9","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_1","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_10","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_2","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_3","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_4","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_5","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_6","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_7","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_8","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_9","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_1","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_10","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_2","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_3","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_4","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_5","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_6","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_7","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_8","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVH8636I_1_9","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_1","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_10","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_2","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_3","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_4","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_5","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_6","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_7","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_8","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_9","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_1","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_10","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_2","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_3","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_4","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_5","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_6","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_7","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_8","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_9","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_1","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_10","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_2","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_3","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_4","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_5","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_6","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_7","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_8","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_9","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_1","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_10","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_2","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_3","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_4","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_5","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_6","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_7","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_8","score":0.5}
{"chain_id":"3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_9","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_1","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_10","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_2","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_3","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_4","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_5","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_6","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_7","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_8","score":0.5}
{"chain_id":"3P4RDNWND55W1BOWA427IEHPH73IJ0_1_9","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_1","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_10","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_2","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_3","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_4","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_5","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_6","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_7","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_8","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_9","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_1","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_10","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_2","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_3","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_4","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_5","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_6","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_7","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_8","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_9","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_1","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_10","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_2","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_3","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_4","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_5","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_6","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_7","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_8","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_9","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_1","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_10","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_2","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_3","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_4","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_5","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_6","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_7","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_8","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_9","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_1","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_10","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_2","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_3","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_4","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_5","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_6","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_7","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_8","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_9","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_1","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_10","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_2","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_3","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_4","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_5","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_6","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_7","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_8","score":0.5}
{"chain_id":"3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_9","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_1","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_10","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_2","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_3","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_4","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_5","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_6","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_7","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_8","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_9","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_1","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_10","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_2","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_3","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_4","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_5","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_6","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_7","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_8","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_9","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_1","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_10","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_2","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_3","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_4","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_5","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_6","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_7","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_8","score":0.5}
{"chain_id":"3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_9","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_1","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_10","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_2","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_3","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_4","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_5","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_6","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_7","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_8","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_9","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_1","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_10","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_2","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_3","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_4","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_5","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_6","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_7","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_8","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_9","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_1","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_10","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_2","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_3","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_4","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_5","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_6","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_7","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_8","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_9","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_1","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_10","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_2","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_3","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_4","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_5","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_6","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_7","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_8","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_9","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_1","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_10","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_2","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_3","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_4","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_5","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_6","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_7","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_8","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_9","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_1","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_10","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_2","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_3","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_4","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_5","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_6","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_7","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_8","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_9","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_1","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_10","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_2","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_3","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_4","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_5","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_6","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_7","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_8","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_9","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_1","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_10","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_2","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_3","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_4","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_5","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_6","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_7","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_8","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_9","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_1","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_10","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_2","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_3","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_4","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_5","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_6","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_7","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_8","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_9","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_1","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_10","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_2","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_3","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_4","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_5","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_6","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_7","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_8","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_9","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_1","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_10","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_2","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_3","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_4","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_5","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_6","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_7","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_8","score":0.5}
{"chain_id":"3PQMUDRV7R50604QSMH76D2PI7LII3_1_9","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_1","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_10","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_2","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_3","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_4","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_5","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_6","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_7","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_8","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_9","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_1","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_10","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_2","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_3","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_4","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_5","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_6","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_7","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_8","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_9","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_1","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_10","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_2","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_3","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_4","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_5","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_6","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_7","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_8","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_9","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_1","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_10","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_2","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_3","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_4","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_5","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_6","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_7","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_8","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_9","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_1","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_10","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_2","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_3","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_4","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_5","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_6","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_7","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_8","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_9","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_1","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_10","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_2","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_3","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_4","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_5","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_6","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_7","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_8","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_9","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_1","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_10","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_2","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_3","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_4","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_5","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_6","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_7","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_8","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_9","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_1","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_10","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_2","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_3","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_4","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_5","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_6","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_7","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_8","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_9","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_1","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_10","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_2","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_3","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_4","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_5","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_6","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_7","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_8","score":0.5}
{"chain_id":"3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_9","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_1","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_10","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_2","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_3","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_4","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_5","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_6","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_7","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_8","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_9","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_1","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_10","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_2","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_3","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_4","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_5","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_6","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_7","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_8","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_9","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_1","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_10","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_2","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_3","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_4","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_5","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_6","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_7","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_8","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_9","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_1","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_10","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_2","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_3","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_4","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_5","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_6","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_7","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_8","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_9","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_1","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_10","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_2","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_3","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_4","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_5","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_6","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_7","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_8","score":0.5}
{"chain_id":"3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_9","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_1","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_10","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_2","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_3","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_4","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_5","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_6","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_7","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_8","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_9","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_1","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_10","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_2","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_3","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_4","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_5","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_6","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_7","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_8","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_9","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_1","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_10","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_2","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_3","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_4","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_5","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_6","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_7","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_8","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3097D74_1_9","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_1","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_10","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_2","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_3","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_4","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_5","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_6","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_7","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_8","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3316D7Q_1_9","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_1","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_10","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_2","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_3","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_4","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_5","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_6","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_7","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_8","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_9","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_1","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_10","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_2","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_3","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_4","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_5","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_6","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_7","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_8","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_9","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_1","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_10","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_2","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_3","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_4","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_5","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_6","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_7","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_8","score":0.5}
{"chain_id":"3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_9","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_1","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_10","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_2","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_3","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_4","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_5","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_6","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_7","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_8","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_9","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_1","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_10","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_2","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_3","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_4","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_5","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_6","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_7","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_8","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_9","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_1","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_10","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_2","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_3","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_4","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_5","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_6","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_7","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_8","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_9","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_1","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_10","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_2","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_3","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_4","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_5","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_6","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_7","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_8","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_9","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_1","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_10","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_2","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_3","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_4","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_5","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_6","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_7","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_8","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_9","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_1","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_10","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_2","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_3","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_4","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_5","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_6","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_7","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_8","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_9","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_1","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_10","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_2","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_3","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_4","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_5","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_6","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_7","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_8","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_9","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_1","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_10","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_2","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_3","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_4","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_5","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_6","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_7","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_8","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_9","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_1","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_10","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_2","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_3","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_4","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_5","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_6","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_7","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_8","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_9","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_1","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_10","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_2","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_3","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_4","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_5","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_6","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_7","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_8","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_9","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_1","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_10","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_2","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_3","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_4","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_5","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_6","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_7","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_8","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_9","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_1","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_10","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_2","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_3","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_4","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_5","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_6","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_7","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_8","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_9","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_1","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_10","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_2","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_3","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_4","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_5","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_6","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_7","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_8","score":0.5}
{"chain_id":"3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_9","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_1","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_10","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_2","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_3","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_4","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_5","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_6","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_7","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_8","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_9","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_1","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_10","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_2","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_3","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_4","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_5","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_6","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_7","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_8","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_9","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_1","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_10","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_2","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_3","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_4","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_5","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_6","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_7","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_8","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_9","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_1","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_10","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_2","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_3","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_4","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_5","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_6","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_7","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_8","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_9","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_1","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_10","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_2","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_3","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_4","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_5","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_6","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_7","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_8","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_9","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_1","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_10","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_2","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_3","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_4","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_5","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_6","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_7","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_8","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_9","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_1","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_10","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_2","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_3","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_4","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_5","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_6","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_7","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_8","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_9","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_1","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_10","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_2","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_3","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_4","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_5","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_6","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_7","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_8","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_9","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_1","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_10","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_2","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_3","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_4","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_5","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_6","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_7","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_8","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_9","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_1","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_10","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_2","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_3","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_4","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_5","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_6","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_7","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_8","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_9","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_1","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_10","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_2","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_3","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_4","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_5","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_6","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_7","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_8","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_9","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_1","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_10","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_2","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_3","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_4","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_5","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_6","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_7","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_8","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YORMT051_1_9","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_1","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_10","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_2","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_3","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_4","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_5","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_6","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_7","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_8","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_9","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_1","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_10","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_2","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_3","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_4","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_5","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_6","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_7","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_8","score":0.5}
{"chain_id":"3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_9","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_1","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_10","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_2","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_3","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_4","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_5","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_6","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_7","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_8","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_9","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_1","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_10","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_2","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_3","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_4","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_5","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_6","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_7","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_8","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_9","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_1","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_10","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_2","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_3","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_4","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_5","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_6","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_7","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_8","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_9","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_1","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_10","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_2","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_3","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_4","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_5","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_6","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_7","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_8","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_9","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_1","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_10","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_2","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_3","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_4","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_5","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_6","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_7","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_8","score":0.5}
{"chain_id":"3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_9","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_1","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_10","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_2","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_3","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_4","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_5","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_6","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_7","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_8","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N6B91GF_1_9","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_1","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_10","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_2","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_3","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_4","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_5","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_6","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_7","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_8","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SE7TRPP_1_9","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_1","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_10","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_2","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_3","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_4","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_5","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_6","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_7","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_8","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SF2PRPC_1_9","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_1","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_10","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_2","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_3","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_4","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_5","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_6","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_7","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_8","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SHEMPRX_1_9","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_1","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_10","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_2","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_3","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_4","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_5","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_6","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_7","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_8","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SRFURPJ_1_9","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_1","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_10","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_2","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_3","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_4","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_5","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_6","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_7","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_8","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_9","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_1","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_10","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_2","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_3","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_4","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_5","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_6","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_7","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_8","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_9","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_1","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_10","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_2","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_3","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_4","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_5","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_6","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_7","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_8","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_9","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_1","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_10","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_2","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_3","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_4","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_5","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_6","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_7","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_8","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_9","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_1","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_10","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_2","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_3","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_4","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_5","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_6","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_7","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_8","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_9","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_1","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_10","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_2","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_3","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_4","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_5","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_6","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_7","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_8","score":0.5}
{"chain_id":"3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_9","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_1","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_10","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_2","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_3","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_4","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_5","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_6","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_7","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_8","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_9","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_1","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_10","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_2","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_3","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_4","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_5","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_6","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_7","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_8","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_9","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_1","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_10","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_2","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_3","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_4","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_5","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_6","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_7","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_8","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_9","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_1","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_10","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_2","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_3","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_4","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_5","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_6","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_7","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_8","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_9","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_1","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_10","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_2","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_3","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_4","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_5","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_6","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_7","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_8","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_9","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_1","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_10","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_2","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_3","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_4","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_5","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_6","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_7","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_8","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_9","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_1","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_10","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_2","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_3","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_4","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_5","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_6","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_7","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_8","score":0.5}
{"chain_id":"3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_9","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_1","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_10","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_2","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_3","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_4","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_5","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_6","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_7","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_8","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_9","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_1","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_10","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_2","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_3","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_4","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_5","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_6","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_7","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_8","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_9","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_1","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_10","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_2","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_3","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_4","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_5","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_6","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_7","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_8","score":0.5}
{"chain_id":"3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_9","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_1","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_10","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_2","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_3","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_4","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_5","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_6","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_7","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_8","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_9","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_1","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_10","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_2","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_3","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_4","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_5","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_6","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_7","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_8","score":0.5}
{"chain_id":"3SNLUL3WO4M75S7W763YHWISIG8ULG_1_9","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_1","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_10","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_2","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_3","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_4","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_5","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_6","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_7","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_8","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_9","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_1","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_10","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_2","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_3","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_4","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_5","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_6","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_7","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_8","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_9","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_1","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_10","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_2","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_3","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_4","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_5","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_6","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_7","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_8","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_9","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_1","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_10","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_2","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_3","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_4","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_5","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_6","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_7","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_8","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_9","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_1","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_10","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_2","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_3","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_4","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_5","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_6","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_7","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_8","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_9","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_1","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_10","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_2","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_3","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_4","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_5","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_6","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_7","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_8","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_9","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_1","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_10","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_2","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_3","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_4","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_5","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_6","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_7","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_8","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_9","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_1","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_10","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_2","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_3","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_4","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_5","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_6","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_7","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_8","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ65FR2Z_1_9","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_1","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_10","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_2","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_3","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_4","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_5","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_6","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_7","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_8","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_9","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_1","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_10","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_2","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_3","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_4","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_5","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_6","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_7","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_8","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_9","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_1","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_10","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_2","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_3","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_4","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_5","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_6","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_7","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_8","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_9","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_1","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_10","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_2","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_3","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_4","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_5","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_6","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_7","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_8","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_9","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_1","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_10","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_2","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_3","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_4","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_5","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_6","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_7","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_8","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_9","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_1","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_10","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_2","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_3","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_4","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_5","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_6","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_7","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_8","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_9","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_1","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_10","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_2","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_3","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_4","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_5","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_6","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_7","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_8","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_9","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_1","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_10","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_2","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_3","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_4","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_5","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_6","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_7","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_8","score":0.5}
{"chain_id":"3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_9","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_1","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_10","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_2","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_3","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_4","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_5","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_6","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_7","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_8","score":0.5}
{"chain_id":"3TR2532VIPTG8RTV83TILBRKD246J3_1_9","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_1","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_10","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_2","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_3","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_4","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_5","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_6","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_7","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_8","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_9","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_1","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_10","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_2","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_3","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_4","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_5","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_6","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_7","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_8","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_9","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_1","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_10","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_2","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_3","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_4","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_5","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_6","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_7","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_8","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_9","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_1","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_10","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_2","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_3","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_4","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_5","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_6","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_7","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_8","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_9","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_1","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_10","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_2","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_3","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_4","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_5","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_6","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_7","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_8","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_9","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_1","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_10","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_2","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_3","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_4","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_5","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_6","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_7","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_8","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_9","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_1","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_10","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_2","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_3","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_4","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_5","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_6","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_7","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_8","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_9","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_1","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_10","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_2","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_3","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_4","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_5","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_6","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_7","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_8","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_9","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_1","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_10","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_2","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_3","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_4","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_5","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_6","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_7","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_8","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_9","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_1","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_10","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_2","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_3","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_4","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_5","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_6","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_7","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_8","score":0.5}
{"chain_id":"3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_9","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_1","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_10","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_2","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_3","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_4","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_5","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_6","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_7","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_8","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_9","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_1","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_10","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_2","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_3","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_4","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_5","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_6","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_7","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_8","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_9","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_1","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_10","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_2","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_3","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_4","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_5","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_6","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_7","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_8","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_9","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_1","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_10","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_2","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_3","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_4","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_5","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_6","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_7","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_8","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1YWDW07_1_9","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_1","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_10","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_2","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_3","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_4","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_5","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_6","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_7","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_8","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_9","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_1","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_10","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_2","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_3","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_4","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_5","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_6","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_7","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_8","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WOYB7X_1_9","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_1","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_10","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_2","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_3","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_4","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_5","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_6","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_7","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_8","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSWB73_1_9","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_1","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_10","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_2","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_3","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_4","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_5","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_6","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_7","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_8","score":0.5}
{"chain_id":"3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_9","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_1","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_10","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_2","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_3","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_4","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_5","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_6","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_7","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_8","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_9","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_1","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_10","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_2","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_3","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_4","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_5","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_6","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_7","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_8","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_9","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_1","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_10","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_2","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_3","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_4","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_5","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_6","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_7","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_8","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_9","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_1","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_10","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_2","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_3","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_4","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_5","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_6","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_7","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_8","score":0.5}
{"chain_id":"3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_9","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_1","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_10","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_2","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_3","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_4","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_5","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_6","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_7","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_8","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_9","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_1","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_10","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_2","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_3","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_4","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_5","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_6","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_7","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_8","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_9","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_1","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_10","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_2","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_3","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_4","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_5","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_6","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_7","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_8","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_9","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_1","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_10","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_2","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_3","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_4","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_5","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_6","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_7","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_8","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_9","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_1","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_10","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_2","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_3","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_4","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_5","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_6","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_7","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_8","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_9","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_1","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_10","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_2","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_3","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_4","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_5","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_6","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_7","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_8","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_9","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_1","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_10","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_2","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_3","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_4","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_5","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_6","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_7","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_8","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_9","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_1","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_10","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_2","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_3","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_4","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_5","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_6","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_7","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_8","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_9","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_1","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_10","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_2","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_3","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_4","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_5","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_6","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_7","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_8","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_9","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_1","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_10","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_2","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_3","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_4","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_5","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_6","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_7","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_8","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_9","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_1","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_10","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_2","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_3","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_4","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_5","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_6","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_7","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_8","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_9","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_1","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_10","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_2","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_3","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_4","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_5","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_6","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_7","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_8","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_9","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_1","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_10","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_2","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_3","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_4","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_5","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_6","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_7","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_8","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_9","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_1","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_10","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_2","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_3","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_4","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_5","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_6","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_7","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_8","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_9","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_1","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_10","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_2","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_3","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_4","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_5","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_6","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_7","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_8","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_9","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_1","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_10","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_2","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_3","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_4","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_5","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_6","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_7","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_8","score":0.5}
{"chain_id":"3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_9","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_1","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_10","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_2","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_3","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_4","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_5","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_6","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_7","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_8","score":0.5}
{"chain_id":"3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_9","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_1","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_10","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_2","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_3","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_4","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_5","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_6","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_7","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_8","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74487O77LM_1_9","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_1","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_10","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_2","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_3","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_4","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_5","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_6","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_7","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_8","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN7448LBK7LN_1_9","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_1","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_10","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_2","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_3","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_4","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_5","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_6","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_7","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_8","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_9","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_1","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_10","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_2","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_3","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_4","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_5","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_6","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_7","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_8","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_9","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_1","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_10","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_2","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_3","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_4","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_5","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_6","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_7","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_8","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_9","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_1","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_10","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_2","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_3","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_4","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_5","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_6","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_7","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_8","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_9","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_1","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_10","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_2","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_3","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_4","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_5","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_6","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_7","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_8","score":0.5}
{"chain_id":"3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_9","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_1","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_10","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_2","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_3","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_4","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_5","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_6","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_7","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_8","score":0.5}
{"chain_id":"3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_9","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_1","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_10","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_2","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_3","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_4","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_5","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_6","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_7","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_8","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_9","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_1","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_10","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_2","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_3","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_4","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_5","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_6","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_7","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_8","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_9","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_1","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_10","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_2","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_3","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_4","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_5","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_6","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_7","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_8","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_9","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_1","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_10","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_2","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_3","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_4","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_5","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_6","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_7","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_8","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_9","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_1","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_10","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_2","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_3","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_4","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_5","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_6","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_7","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_8","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_9","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_1","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_10","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_2","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_3","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_4","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_5","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_6","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_7","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_8","score":0.5}
{"chain_id":"3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_9","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_1","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_10","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_2","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_3","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_4","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_5","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_6","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_7","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_8","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_9","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_1","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_10","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_2","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_3","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_4","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_5","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_6","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_7","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_8","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_9","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_1","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_10","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_2","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_3","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_4","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_5","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_6","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_7","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_8","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_9","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_1","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_10","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_2","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_3","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_4","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_5","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_6","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_7","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_8","score":0.5}
{"chain_id":"3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_9","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_1","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_10","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_2","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_3","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_4","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_5","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_6","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_7","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_8","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_9","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_1","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_10","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_2","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_3","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_4","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_5","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_6","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_7","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_8","score":0.5}
{"chain_id":"3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_9","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_1","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_10","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_2","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_3","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_4","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_5","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_6","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_7","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_8","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_9","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_1","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_10","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_2","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_3","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_4","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_5","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_6","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_7","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_8","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_9","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_1","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_10","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_2","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_3","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_4","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_5","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_6","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_7","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_8","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_9","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_1","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_10","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_2","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_3","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_4","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_5","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_6","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_7","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_8","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_9","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_1","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_10","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_2","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_3","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_4","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_5","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_6","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_7","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_8","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_9","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_1","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_10","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_2","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_3","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_4","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_5","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_6","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_7","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_8","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_9","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_1","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_10","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_2","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_3","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_4","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_5","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_6","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_7","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_8","score":0.5}
{"chain_id":"3WS1NTTKEYB5PELKNOMGXCP147BF09_1_9","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_1","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_10","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_2","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_3","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_4","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_5","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_6","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_7","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_8","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_9","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_1","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_10","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_2","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_3","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_4","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_5","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_6","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_7","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_8","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MJEY6C_1_9","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_1","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_10","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_2","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_3","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_4","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_5","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_6","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_7","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_8","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68NUT6YM_1_9","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_1","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_10","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_2","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_3","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_4","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_5","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_6","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_7","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_8","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_9","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_1","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_10","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_2","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_3","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_4","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_5","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_6","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_7","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_8","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_9","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_1","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_10","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_2","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_3","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_4","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_5","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_6","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_7","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_8","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_9","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_1","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_10","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_2","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_3","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_4","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_5","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_6","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_7","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_8","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_9","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_1","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_10","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_2","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_3","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_4","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_5","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_6","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_7","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_8","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_9","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_1","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_10","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_2","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_3","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_4","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_5","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_6","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_7","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_8","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_9","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_1","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_10","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_2","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_3","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_4","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_5","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_6","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_7","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_8","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_9","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_1","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_10","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_2","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_3","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_4","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_5","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_6","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_7","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_8","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403I028LJ_1_9","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_1","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_10","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_2","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_3","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_4","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_5","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_6","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_7","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_8","score":0.5}
{"chain_id":"3X3OR7WPZZZ97V0J432TL403LR5L8K_1_9","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_1","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_10","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_2","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_3","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_4","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_5","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_6","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_7","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_8","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_9","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_1","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_10","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_2","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_3","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_4","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_5","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_6","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_7","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_8","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_9","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_1","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_10","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_2","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_3","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_4","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_5","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_6","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_7","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_8","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_9","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_1","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_10","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_2","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_3","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_4","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_5","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_6","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_7","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_8","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_9","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_1","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_10","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_2","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_3","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_4","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_5","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_6","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_7","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_8","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_9","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_1","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_10","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_2","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_3","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_4","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_5","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_6","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_7","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_8","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_9","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_1","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_10","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_2","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_3","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_4","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_5","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_6","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_7","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_8","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_9","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_1","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_10","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_2","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_3","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_4","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_5","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_6","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_7","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_8","score":0.5}
{"chain_id":"3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_9","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_1","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_10","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_2","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_3","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_4","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_5","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_6","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_7","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_8","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_9","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_1","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_10","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_2","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_3","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_4","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_5","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_6","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_7","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_8","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_9","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_1","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_10","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_2","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_3","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_4","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_5","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_6","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_7","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_8","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_9","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_1","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_10","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_2","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_3","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_4","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_5","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_6","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_7","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_8","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_9","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_1","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_10","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_2","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_3","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_4","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_5","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_6","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_7","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_8","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_9","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_1","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_10","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_2","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_3","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_4","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_5","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_6","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_7","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_8","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59354EPE_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_9","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_1","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_10","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_2","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_3","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_4","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_5","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_6","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_7","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_8","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_9","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_1","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_10","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_2","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_3","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_4","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_5","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_6","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_7","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_8","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_9","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_1","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_10","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_2","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_3","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_4","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_5","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_6","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_7","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_8","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_9","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_1","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_10","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_2","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_3","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_4","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_5","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_6","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_7","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_8","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_9","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_1","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_10","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_2","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_3","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_4","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_5","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_6","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_7","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_8","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_9","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_1","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_10","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_2","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_3","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_4","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_5","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_6","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_7","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_8","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_9","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_1","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_10","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_2","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_3","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_4","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_5","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_6","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_7","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_8","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_9","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_1","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_10","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_2","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_3","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_4","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_5","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_6","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_7","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_8","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_9","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_1","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_10","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_2","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_3","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_4","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_5","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_6","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_7","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_8","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_9","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_1","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_10","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_2","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_3","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_4","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_5","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_6","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_7","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_8","score":0.5}
{"chain_id":"3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_9","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_1","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_10","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_2","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_3","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_4","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_5","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_6","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_7","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_8","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_9","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_1","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_10","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_2","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_3","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_4","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_5","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_6","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_7","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_8","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_9","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_1","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_10","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_2","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_3","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_4","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_5","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_6","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_7","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_8","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_9","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_1","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_10","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_2","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_3","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_4","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_5","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_6","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_7","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_8","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_9","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_1","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_10","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_2","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_3","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_4","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_5","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_6","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_7","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_8","score":0.5}
{"chain_id":"3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_9","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_1","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_10","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_2","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_3","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_4","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_5","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_6","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_7","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_8","score":0.5}
{"chain_id":"3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_9","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_1","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_10","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_2","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_3","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_4","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_5","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_6","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_7","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_8","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_9","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_1","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_10","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_2","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_3","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_4","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_5","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_6","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_7","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_8","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_9","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_1","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_10","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_2","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_3","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_4","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_5","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_6","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_7","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_8","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W5LOVK9_1_9","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_1","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_10","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_2","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_3","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_4","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_5","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_6","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_7","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_8","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_9","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_1","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_10","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_2","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_3","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_4","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_5","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_6","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_7","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_8","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_9","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_1","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_10","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_2","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_3","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_4","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_5","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_6","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_7","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_8","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_9","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_1","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_10","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_2","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_3","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_4","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_5","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_6","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_7","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_8","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_9","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_1","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_10","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_2","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_3","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_4","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_5","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_6","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_7","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_8","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_9","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_1","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_10","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_2","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_3","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_4","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_5","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_6","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_7","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_8","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_9","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_1","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_10","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_2","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_3","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_4","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_5","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_6","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_7","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_8","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_9","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_1","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_10","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_2","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_3","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_4","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_5","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_6","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_7","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_8","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_9","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_1","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_10","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_2","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_3","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_4","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_5","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_6","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_7","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_8","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_9","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_1","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_10","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_2","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_3","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_4","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_5","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_6","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_7","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_8","score":0.5}
{"chain_id":"3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_9","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_1","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_2","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_3","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_4","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_5","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_6","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_7","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_8","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_1","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_10","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_2","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_3","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_4","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_5","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_6","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_7","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_8","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_9","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_1","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_10","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_2","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_3","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_4","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_5","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_6","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_7","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_8","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_9","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_1","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_10","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_2","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_3","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_4","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_5","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_6","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_7","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_8","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_9","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_1","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_10","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_2","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_3","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_4","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_5","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_6","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_7","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_8","score":0.5}
{"chain_id":"3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_9","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_1","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_10","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_2","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_3","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_4","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_5","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_6","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_7","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_8","score":0.5}
{"chain_id":"3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_9","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_1","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_10","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_2","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_3","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_4","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_5","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_6","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_7","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_8","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_9","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_1","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_10","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_2","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_3","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_4","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_5","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_6","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_7","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_8","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_9","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_1","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_10","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_2","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_3","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_4","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_5","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_6","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_7","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_8","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_9","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_1","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_10","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_2","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_3","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_4","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_5","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_6","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_7","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_8","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KOZCCZS_1_9","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_1","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_10","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_2","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_3","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_4","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_5","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_6","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_7","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_8","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KPELCZW_1_9","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_1","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_10","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_2","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_3","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_4","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_5","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_6","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_7","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_8","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_9","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_1","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_10","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_2","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_3","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_4","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_5","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_6","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_7","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_8","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_9","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_1","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_10","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_2","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_3","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_4","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_5","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_6","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_7","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_8","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_9","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_1","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_10","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_2","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_3","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_4","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_5","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_6","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_7","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_8","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_9","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_1","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_10","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_2","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_3","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_4","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_5","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_6","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_7","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_8","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_9","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_1","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_10","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_2","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_3","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_4","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_5","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_6","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_7","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_8","score":0.5}
{"chain_id":"3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_9","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_1","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_10","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_2","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_3","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_4","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_5","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_6","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_7","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_8","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_9","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_1","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_10","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_2","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_3","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_4","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_5","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_6","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_7","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_8","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_9","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_1","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_10","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_2","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_3","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_4","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_5","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_6","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_7","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_8","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z2540ROG_1_9","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_1","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_10","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_2","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_3","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_4","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_5","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_6","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_7","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_8","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_9","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_1","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_2","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_3","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_4","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_5","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_6","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_1","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_10","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_2","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_3","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_4","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_5","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_6","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_7","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_8","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_9","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_1","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_10","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_2","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_3","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_4","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_5","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_6","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_7","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_8","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_9","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_1","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_10","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_2","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_3","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_4","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_5","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_6","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_7","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_8","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_9","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_1","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_10","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_2","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_3","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_4","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_5","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_6","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_7","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_8","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_9","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_1","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_10","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_2","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_3","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_4","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_5","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_6","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_7","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_8","score":0.5}
{"chain_id":"3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_9","score":0.5}
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/data/dummy_predictions_test.jsonl/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/data/dummy_predictions_test.jsonl",
"repo_id": "ContextualSP",
"token_count": 351479
}
| 235 |
import unittest
from collections import OrderedDict
from process import process, Process, Conversion, Move, Input, Output
from process.constants import NO_ACTION as NO_ACT, NO_LOCATION as NO_LOC, CREATE, DESTROY, MOVE


class TestProcess(unittest.TestCase):
    """Unit tests for Process summaries (inputs, outputs, conversions, moves)
    and the module-level helpers in the ``process`` package.

    Note: the deprecated ``assertEquals`` alias (removed in Python 3.12) has
    been replaced with ``assertEqual`` throughout.
    """

    def test_qa(self):
        """Check inputs()/outputs()/conversions()/moves() on two hand-built processes."""
        # Process 514: snow is destroyed and a glacier + mass are created in 'area'.
        p = Process(
            process_id=514,
            locations=OrderedDict([
                ('glacier', [NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, 'area', 'area']),
                ('snow', ['area', 'area', 'area', 'area', NO_LOC, NO_LOC, NO_LOC, NO_LOC]),
                ('mass', [NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, 'area', 'area', 'area'])
            ]),
            actions=OrderedDict([
                ('glacier', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, NO_ACT, CREATE, NO_ACT]),
                ('snow', [NO_ACT, NO_ACT, NO_ACT, DESTROY, NO_ACT, NO_ACT, NO_ACT]),
                ('mass', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, CREATE, NO_ACT, NO_ACT])
            ]),
            num_steps=7,
        )
        self.assertEqual(p.inputs(), [
            Input(participants='snow')
        ])
        self.assertEqual(p.outputs(), [
            Output(participants='glacier'),
            Output(participants='mass')
        ])
        self.assertEqual(p.conversions(), [
            Conversion(destroyed='snow', created='mass', locations='area', step_id='4')
        ])
        self.assertEqual(p.moves(), [])

        # Process 540: nothing is created or destroyed; participants only move.
        p = Process(
            process_id=540,
            locations=OrderedDict([
                ('air', ['unk', 'unk', 'unk', 'bronchiole', 'alveolus', 'unk', 'unk', 'unk', 'unk', 'unk', 'unk']),
                ('oxygen', ['unk', 'unk', 'unk', 'unk', 'unk', 'bloodstream', 'unk', 'unk', 'unk', 'unk', 'unk']),
                ('carbon dioxide',
                 ['unk', 'unk', 'unk', 'unk', 'unk', 'bloodstream', 'bloodstream', 'alveolus', 'bronchiole', 'lung',
                  'body'])
            ]),
            actions=OrderedDict([
                ('air', [NO_ACT, NO_ACT, MOVE, MOVE, MOVE, NO_ACT, NO_ACT, NO_ACT, NO_ACT, NO_ACT]),
                ('oxygen', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, MOVE, MOVE, NO_ACT, NO_ACT, NO_ACT, NO_ACT]),
                ('carbon dioxide', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, MOVE, NO_ACT, MOVE, MOVE, MOVE, MOVE])
            ]),
            num_steps=10,
        )
        self.assertEqual(p.inputs(), [])
        self.assertEqual(p.outputs(), [])
        self.assertEqual(p.conversions(), [])
        self.assertEqual(p.moves(), [
            Move(participants='air', location_before='unk', location_after='bronchiole', step_id='3'),
            Move(participants='air', location_before='bronchiole', location_after='alveolus', step_id='4'),
            Move(participants='air', location_before='alveolus', location_after='unk', step_id='5'),
            Move(participants='oxygen', location_before='unk', location_after='bloodstream', step_id='5'),
            Move(participants='oxygen', location_before='bloodstream', location_after='unk', step_id='6'),
            Move(participants='carbon dioxide', location_before='unk', location_after='bloodstream', step_id='5'),
            Move(participants='carbon dioxide', location_before='bloodstream', location_after='alveolus', step_id='7'),
            Move(participants='carbon dioxide', location_before='alveolus', location_after='bronchiole', step_id='8'),
            Move(participants='carbon dioxide', location_before='bronchiole', location_after='lung', step_id='9'),
            Move(participants='carbon dioxide', location_before='lung', location_after='body', step_id='10'),
        ])

    def test_is_this_action_seq_of_an_input(self):
        """An action sequence that contains a CREATE is not that of a process input."""
        self.assertFalse(process._is_this_action_seq_of_an_input([NO_ACT, CREATE, DESTROY, NO_ACT]))
        self.assertFalse(process._is_this_action_seq_of_an_input([CREATE, DESTROY, NO_ACT, NO_ACT]))

    def test_summarize_participants(self):
        """Semicolon-separated participants are rendered as 'a OR b'."""
        self.assertEqual('gasoline OR gas', process._summarize_participants('gasoline; gas'))
        self.assertEqual('gasoline OR gas', process._summarize_participants('gasoline;gas'))

    def test_split_participants(self):
        """Splitting tolerates an optional space after the semicolon."""
        self.assertEqual(['gasoline', 'gas'], process._split_participants('gasoline; gas'))
        self.assertEqual(['gasoline', 'gas'], process._split_participants('gasoline;gas'))


if __name__ == '__main__':
    unittest.main()
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/test_process.py/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/test_process.py",
"repo_id": "ContextualSP",
"token_count": 2016
}
| 236 |
import unittest
from text import terms
class TestTerms(unittest.TestCase):
    """Unit tests for term-set extraction, normalization, and overlap counting."""

    def test_extract_termsets(self):
        """Without normalization, word forms are kept as written."""
        for phrase, expected in [
            ("dew", [{'dew'}]),                # single term
            ("raining", [{'raining'}]),        # word form must not be stemmed here
        ]:
            self.assertEqual(terms.extract_termsets(phrase), expected)

    def test_extract_termsets_with_normalization(self):
        """With normalization, words are stemmed and AND/OR structure is preserved."""
        simple_cases = [
            ("dew", [{'dew'}]),                                           # single term
            ("raining", [{'rain'}]),                                      # stemmed
            ("raining cats and dogs", [{'raining cats and dog'}]),        # multi-word term
            ("dew AND rain", [{'dew'}, {'rain'}]),                        # ANDed terms
            ("dew OR rain", [{'dew', 'rain'}]),                           # ORed terms
            ("dew OR rain AND sun", [{'dew', 'rain'}, {'sun'}]),          # mixed
        ]
        for phrase, expected in simple_cases:
            self.assertEqual(terms.extract_termsets_with_normalization(phrase), expected)

        # A longer mixed arrangement of ORs and ANDs.
        self.assertEqual(
            terms.extract_termsets_with_normalization("dew OR rain AND sun AND foo OR bar OR baz"),
            [
                {'dew', 'rain'},
                {'sun'},
                {'foo', 'bar', 'baz'}
            ]
        )
        # "droplet" and "droplets" should collapse into the single term "droplet".
        self.assertEqual(
            terms.extract_termsets_with_normalization("dew OR droplet OR droplets AND sun AND foo OR bar OR baz"),
            [
                {'dew', 'droplet'},
                {'sun'},
                {'foo', 'bar', 'baz'}
            ]
        )

    def test_terms_overlap(self):
        """terms_overlap counts how many term groups share at least one term."""
        cases = [
            ([{'foo'}], [{'foo'}], 1),
            ([{'foo'}], [{'bar'}], 0),
            ([{'diesel'}, {'energi'}], [{'diesel'}, {'petrol'}], 1),
            ([{'plant', 'anim'}], [{'soft tissu'}], 0),
            ([{'nitrogen'}], [{'fixed nitrogen', 'usable nitrogen'}], 0),
            ([{'rain'}, {'water', 'liquid'}], [{'rain'}, {'water'}], 2),
        ]
        for left, right, expected in cases:
            self.assertEqual(terms.terms_overlap(left, right), expected)

    def test_normalization(self):
        """_normalize_words lowercases, drops stopwords, and stems each phrase."""
        self.assertEqual(
            terms._normalize_words(["the Raining", "DANCING", "experimenting"]),
            ["rain", "danc", "experi"]
        )


if __name__ == '__main__':
    unittest.main()
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/text/test_terms.py/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/text/test_terms.py",
"repo_id": "ContextualSP",
"token_count": 1720
}
| 237 |
This directory contains the training and test files for evaluating predictions,
and a sample prediction file.
The file `train_uniform.jsonl` is the main training data to use for
leaderboard entries (please note that in our paper, we also experiment
with training on an `iid` set (not included here), _**which is not
allowed when submitting to the leaderboard**_).
The file `test.jsonl` has the test questions, without labels.
Each example in these files looks like the following:
```json
{
"query": "event: Tom's teeth are crooked ends before he has braces on for a while",
"story": "Tom needed to get braces. He was afraid of them. The dentist assured him everything would be fine. Tom had them on for a while. Once removed he felt it was worth it.",
"label": "contradiction"
}
```
and consists of three fields:
* `query` (or hypothesis)
* `story` (or premise)
* `label` (the inference label; this is absent in `test.jsonl`)
The file `predictions.jsonl` shows an example prediction file for the `uniform`
training split that can be evaluated against `train_uniform.jsonl`.
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/tracie/data/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/tracie/data/README.md",
"repo_id": "ContextualSP",
"token_count": 295
}
| 238 |
import os

# Configuration constants for LogiGAN pre-training (generator / verifier / NLI
# labeler). This module is imported for its module-level constants; importing it
# also creates the run directory under `corpus_dir` as a side effect.

############ General Parameters ##############
gan_alpha = 0.8
mode = 'debug'
project_dir = os.getenv('HOME')  # e.g. "/home/v-xinyupi/"
code_dir = f"{project_dir}/LogicPretrain/code/GAN-new"
model_name = 'LogiGAN'
corpus_dir = f"{project_dir}/LogicPretrain/Logic_gan_new/data/corpus_gan_new/beta"
model_output_dir = f"{project_dir}/LogicPretrain/models/GAN/{model_name}"
# NOTE: the original file left these assignments empty (a syntax error that made
# the module unimportable). They are placeholders the user must fill in.
initial_gen_path = None  # TODO: warmup-generator checkpoint dir
initial_ver_path = None  # TODO: warmup-verifier checkpoint dir
# e.g. 'albert-large-v2' or f"{project_dir}/v-wanzho/LogicPretrain/models/bart_checkpoints_warmup/checkpoint-20000/"
max_iter = 15 if mode != 'debug' else 1
run_dir = f'run-{model_name}'
os.makedirs(os.path.join(corpus_dir, run_dir), exist_ok=True)

############ Generator Parameters ##############
# PATH
gen_train_src_file = "gen_train_src_es.jsonl"
gen_val_src_file = f"{run_dir}/gen_train_iter.jsonl"
gen_train_iter_file = f"{run_dir}/gen_train_iter.jsonl"
if mode == 'debug':
    gen_train_src_file = "gen_train_src_es_toy.jsonl"  # DEBUG ONLY
gen_train_src_toy_file = "gen_train_src_es_toy.jsonl"  # DEBUG ONLY
# gen_val_src_file = "gen_valid_toy.jsonl"  # DEBUG ONLY
# gen_train_iter_file = f"{run_dir}/gen_train_iter_toy.jsonl"  # DEBUG ONLY
unlabeled_gen_train_iter_file = f"{run_dir}/gen_train_iter_unlabeled.jsonl"  # Generator adhoc self sampling
unlabeled_ver_train_iter_file = f"{run_dir}/ver_train_iter_unlabled.jsonl"  # Generator adhoc inference for gan ver
gen_train_src_path = os.path.join(corpus_dir, gen_train_src_file)
gen_val_src_path = os.path.join(corpus_dir, gen_val_src_file)
gen_train_iter_path = os.path.join(corpus_dir, gen_train_iter_file)  # Xinyu: Infered by verifier
unlabeled_gen_train_iter_path = os.path.join(corpus_dir, unlabeled_gen_train_iter_file)
unlabeled_ver_train_iter_path = os.path.join(corpus_dir, unlabeled_ver_train_iter_file)
gen_output_dir = os.path.join(model_output_dir, "gen_checkpoints")
# Xinyu: Self-Sampling Size. Default 1e5, i.e. 10% of 100w.
gen_train_samples_per_iter = 100000 if mode != 'debug' else 100
# Trainer (placeholders: adjust all three by GPU memory size before running)
gen_per_device_train_batch_size = None  # TODO: set by GPU memory size
gen_per_device_examples_num = None  # TODO: pos+neg examples per batch, e.g. 1 pos + 5 neg -> 6
gen_per_device_eval_batch_size = None  # TODO: set by GPU memory size
gen_gradient_accumulation_steps = 8
gen_learning_rate = 5e-5
# Beam search
gen_num_beams = 5
num_return_seq = 5
gen_max_length = 256
gen_min_length = 5
gen_length_penalty = 4.0
gen_early_stopping = True
gen_no_repeat_ngram_size = 3

############ Verifier Parameters ##############
# PATH
ver_train_src_file = "ver_train_es.jsonl"
ver_train_iter_file = f"{run_dir}/ver_train_iter.jsonl"
# ver_train_src_file = "ver_train_src_toy.jsonl"  # DEBUG ONLY
ver_train_src_path = os.path.join(corpus_dir, ver_train_src_file)
# ver_train_iter_path = os.path.join(corpus_dir, ver_train_iter_file)
ver_train_iter_path = os.path.join(corpus_dir, unlabeled_ver_train_iter_file)
ver_script_path = os.path.join(code_dir, "verifier.py")
ver_output_dir = os.path.join(model_output_dir, "ver_checkpoints")
# Xinyu: Default 2.7e5, i.e. ~10% of 270w.
ver_train_samples_per_iter = 80000 if mode != 'debug' else 80
# Trainer (placeholders: adjust both by GPU memory size before running)
ver_per_device_train_batch_size = None  # TODO: set by GPU memory size
ver_per_device_eval_batch_size = None  # TODO: set by GPU memory size
ver_gradient_accumulation_steps = 1
ver_learning_rate = 1e-5

############ NLI Labeler Parameters ##############
nli_script_path = os.path.join(code_dir, "labeler.py")
nli_output_dir = os.path.join(model_output_dir, "labeler_checkpoints")  # placeholder; should stay empty
nli_per_device_eval_batch_size = 24
|
ContextualSP/logigan/pre-training/parameters16g_es_corpusb.py/0
|
{
"file_path": "ContextualSP/logigan/pre-training/parameters16g_es_corpusb.py",
"repo_id": "ContextualSP",
"token_count": 1447
}
| 239 |
#!/usr/bin/env bash
# End-to-end evaluation pipeline: sketch prediction -> traversal-path
# preprocessing -> traversal-path prediction -> final accuracy scoring.
# NOTE(review): no `set -e`; a failing stage does not stop later stages.
## step 1: generate sketch predictions
bash ./sketch_prediction/evaluate.sh
## step 2: preprocess data for traversal path prediction
python preprocess_hierarchical_inference.py
## step 3: generate valid traversal paths with the ESIM matcher
python ./traversal_path_prediction/MatchZoo-py/evaluate_esim.py
## step 4: evaluate predictions, print accuracy score
python evaluate.py
|
ContextualSP/poset_decoding/evaluate.sh/0
|
{
"file_path": "ContextualSP/poset_decoding/evaluate.sh",
"repo_id": "ContextualSP",
"token_count": 94
}
| 240 |
# Evaluate a trained ESIM model on CFQ traversal-path classification:
# load the latest checkpoint, predict on the test split, dump probabilities
# and argmax predictions to ./output/, and print test accuracy.
import torch
import numpy as np
import pandas as pd
import matchzoo as mz
import os
import json
print('matchzoo version', mz.__version__)
# Compositional split to evaluate (e.g. mcd1/mcd2/mcd3 for CFQ).
split = "mcd1"
data_root = "./data/"
model_path = f"./model/traversal_path_esim-{split}"
# Binary classification: whether a traversal path is valid.
task = mz.tasks.Classification(num_classes=2)
task.metrics = ['acc']
print("`classification_task` initialized with metrics", task.metrics)
# Most recently modified file in the model dir is taken as the best checkpoint.
best_model = sorted(os.listdir(model_path), key=lambda fn: os.path.getmtime(model_path+'/' + fn))[-1]
test_raw = mz.datasets.cfq.load_data(stage='test', task=task, data_root= data_root, suffix="mask_predict_classification.csv")
print('data loaded as `train_pack_raw` `dev_pack_raw` `test_pack_raw`')
# print(model_path, )
# Reuse the preprocessor fitted at training time so the vocabulary matches.
preprocessor = mz.load_preprocessor(model_path)
# preprocessor.fit(train_raw)
# train_processed = preprocessor.transform(train_raw)
test_processed = preprocessor.transform(test_raw)
# print(test_processed.frame())
testset = mz.dataloader.Dataset(
    data_pack=test_processed,
    mode='point',
    batch_size=1024,
    shuffle = False
)
padding_callback = mz.models.ESIM.get_default_padding_callback()
testloader = mz.dataloader.DataLoader(
    dataset=testset,
    stage='test',
    callback=padding_callback
)
# Rebuild the model skeleton, then load checkpoint weights into it.
model = mz.models.ESIM()
model.params['task'] = task
model.params['embedding_input_dim'] = preprocessor.context['embedding_input_dim']
model.guess_and_fill_missing_params()
model.build()
model.load_state_dict(torch.load(f"{model_path}/{best_model}"))
# Trainer is only used here as a prediction harness; no training happens.
optimizer = torch.optim.Adam(model.parameters())
trainer = mz.trainers.Trainer(
    model=model,
    optimizer=optimizer,
    trainloader=testloader,
    validloader=testloader,
    validate_interval=None,
    epochs=50,
    save_all = False,
    save_dir=model_path,
    device=[0,1,2,3,4,5,6,7]
)
# print(trainer.evaluate(testloader))
print(len(testloader.label))
# print(len(pred))
y_pred = trainer.predict(testloader)
# Persist raw class probabilities, then hard (argmax) predictions.
open(f"./output/esim-mask-{split}-predict.prob", "w").write(json.dumps(y_pred.tolist()))
y_pred = np.argmax(y_pred, axis=1)
open(f"./output/esim-mask-{split}-predict", "w").write(json.dumps(y_pred.tolist()))
assert len(y_pred) == len(testloader.label)
# Plain accuracy on the test split.
print(np.sum(y_pred == testloader.label) / float(len(y_pred)))
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/evaluate_esim.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/evaluate_esim.py",
"repo_id": "ContextualSP",
"token_count": 865
}
| 241 |
import numpy as np
import matchzoo as mz
from matchzoo.engine.base_callback import BaseCallback
class Ngram(BaseCallback):
    """
    Data-loader callback that attaches character n-gram features to a batch.

    :param preprocessor: fitted :class:`BasicPreprocessor` holding the n-gram
        vocabulary information.
    :param mode: one of 'index', 'onehot', 'sum' or 'aggregate'.
    """

    def __init__(
        self,
        preprocessor: mz.preprocessors.BasicPreprocessor,
        mode: str = 'index'
    ):
        """Init."""
        self._mode = mode
        # Precompute the per-word n-gram representation once, up front.
        self._word_to_ngram = _build_word_ngram_map(
            preprocessor.context['ngram_process_unit'],
            preprocessor.context['ngram_vocab_unit'],
            preprocessor.context['vocab_unit'].state['index_term'],
            mode
        )

    def on_batch_unpacked(self, x, y):
        """Insert `ngram_left` and `ngram_right` into the batch dict `x`."""
        lookup = self._word_to_ngram
        x['ngram_left'] = [[lookup[term] for term in row]
                           for row in x['text_left']]
        x['ngram_right'] = [[lookup[term] for term in row]
                            for row in x['text_right']]
        if self._mode == 'aggregate':
            # Collapse each text into a single summed n-gram vector and make
            # the aggregated features replace the token ids entirely.
            x['ngram_left'] = [list(np.sum(vectors, axis=0))
                               for vectors in x['ngram_left']]
            x['ngram_right'] = [list(np.sum(vectors, axis=0))
                                for vectors in x['ngram_right']]
            x['text_left'] = x['ngram_left']
            x['text_right'] = x['ngram_right']
def _build_word_ngram_map(
    ngram_process_unit: mz.preprocessors.units.NgramLetter,
    ngram_vocab_unit: mz.preprocessors.units.Vocabulary,
    index_term: dict,
    mode: str = 'index'
) -> dict:
    """
    Generate the word-index to n-gram-vector mapping.

    :param ngram_process_unit: The fitted :class:`NgramLetter` object.
    :param ngram_vocab_unit: The fitted :class:`Vocabulary` object.
    :param index_term: The index to term mapping dict.
    :param mode: One of 'index', 'onehot', 'sum' or 'aggregate'.
    :return: the word to ngram vector mapping.
    :raises ValueError: if `mode` is not one of the four supported values.
    """
    # FIX: validate `mode` once, up front, instead of raising only when the
    # first non-special word is reached inside the loop (fail fast).
    if mode not in ('index', 'onehot', 'sum', 'aggregate'):
        raise ValueError(f'mode error, it should be one of `index`, '
                         f'`onehot`, `sum` or `aggregate`.')
    word_to_ngram = {}
    ngram_size = len(ngram_vocab_unit.state['index_term'])
    for idx, word in index_term.items():
        if idx == 0:
            # Padding entry carries no n-grams.
            continue
        if idx == 1:  # OOV maps to the single OOV n-gram id.
            word_ngram = [1]
        else:
            ngrams = ngram_process_unit.transform([word])
            word_ngram = ngram_vocab_unit.transform(ngrams)
        if mode == 'index':
            word_to_ngram[idx] = word_ngram
        else:
            # 'onehot' keeps one row per n-gram; 'sum'/'aggregate' collapse
            # the rows into a single bag-of-ngrams vector.
            num_ngrams = len(word_ngram)
            onehot = np.zeros((num_ngrams, ngram_size))
            onehot[np.arange(num_ngrams), word_ngram] = 1
            word_to_ngram[idx] = onehot if mode == 'onehot' else np.sum(onehot, axis=0)
    return word_to_ngram
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/dataloader/callbacks/ngram.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/dataloader/callbacks/ngram.py",
"repo_id": "ContextualSP",
"token_count": 1888
}
| 242 |
"""GloVe Embedding data loader."""
from pathlib import Path
import matchzoo as mz
_glove_embedding_url = "http://nlp.stanford.edu/data/glove.6B.zip"
def load_glove_embedding(dimension: int = 50) -> mz.embedding.Embedding:
    """
    Load the pretrained GloVe embedding of the requested dimensionality.

    :param dimension: the size of embedding dimension; the value can only be
        50, 100, or 300.
    :return: The :class:`mz.embedding.Embedding` object.
    """
    target = (Path(mz.USER_DATA_DIR) / 'glove') / f'glove.6B.{dimension}d.txt'
    if not target.exists():
        # First use: download the archive and unpack it into the cache dir.
        mz.utils.get_file('glove_embedding',
                          _glove_embedding_url,
                          extract=True,
                          cache_dir=mz.USER_DATA_DIR,
                          cache_subdir='glove')
    return mz.embedding.load_from_file(file_path=str(target), mode='glove')
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/embeddings/load_glove_embedding.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/embeddings/load_glove_embedding.py",
"repo_id": "ContextualSP",
"token_count": 433
}
| 243 |
"""Metric base class and some related utilities."""
import abc
import numpy as np
class BaseMetric(abc.ABC):
    """Abstract base class for all evaluation metrics."""

    ALIAS = 'base_metric'

    @abc.abstractmethod
    def __call__(self, y_true: np.array, y_pred: np.array) -> float:
        """
        Compute the metric value.

        :param y_true: An array of ground-truth labels.
        :param y_pred: An array of predicted values.
        :return: Evaluation of the metric.
        """

    @abc.abstractmethod
    def __repr__(self):
        """:return: Formatted string representation of the metric."""

    def __eq__(self, other):
        """Two metrics are equal iff they share a type and all attributes."""
        return type(self) is type(other) and vars(self) == vars(other)

    def __hash__(self):
        """Hash on the string form so equal metrics hash alike."""
        return hash(str(self))
class RankingMetric(BaseMetric):
    """Ranking metric base class; subclasses score ranked result lists."""
    # Name used when metrics are referred to by alias.
    ALIAS = 'ranking_metric'
class ClassificationMetric(BaseMetric):
    """Classification metric base class."""
    # Name used when metrics are referred to by alias.
    ALIAS = 'classification_metric'
def sort_and_couple(labels: np.array, scores: np.array) -> np.array:
    """Pair each label with its score, ordered by descending score."""
    paired = zip(labels, scores)
    return np.array(sorted(paired, key=lambda pair: pair[1], reverse=True))
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/engine/base_metric.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/engine/base_metric.py",
"repo_id": "ContextualSP",
"token_count": 523
}
| 244 |
"""Mean reciprocal ranking metric."""
import numpy as np
from matchzoo.engine.base_metric import (
BaseMetric, sort_and_couple, RankingMetric
)
class MeanReciprocalRank(RankingMetric):
    """Mean reciprocal rank (MRR) metric."""

    ALIAS = ['mean_reciprocal_rank', 'mrr']

    def __init__(self, threshold: float = 0.):
        """
        :class:`MeanReciprocalRankMetric`.

        :param threshold: label threshold above which a document counts
            as relevant.
        """
        self._threshold = threshold

    def __repr__(self) -> str:
        """:return: Formatted string representation of the metric."""
        return f'{self.ALIAS[0]}({self._threshold})'

    def __call__(self, y_true: np.array, y_pred: np.array) -> float:
        """
        Return the reciprocal rank of the first relevant document.

        For example, with y_pred = [0.2, 0.3, 0.7, 1.0] and
        y_true = [1, 0, 0, 0], the only relevant document ranks 4th,
        so the result is 0.25.

        :param y_true: The ground truth label of each document.
        :param y_pred: The predicted scores of each document.
        :return: Mean reciprocal rank (0 when nothing is relevant).
        """
        ranked = sort_and_couple(y_true, y_pred)
        for rank, (label, _score) in enumerate(ranked, start=1):
            if label > self._threshold:
                return 1. / rank
        return 0.
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/mean_reciprocal_rank.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/mean_reciprocal_rank.py",
"repo_id": "ContextualSP",
"token_count": 623
}
| 245 |
"""An implementation of DSSM, Deep Structured Semantic Model."""
import typing
import torch
import torch.nn.functional as F
from matchzoo import preprocessors
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.param import Param
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.base_preprocessor import BasePreprocessor
class DSSM(BaseModel):
    """
    Deep structured semantic model.
    Examples:
        >>> model = DSSM()
        >>> model.params['mlp_num_layers'] = 3
        >>> model.params['mlp_num_units'] = 300
        >>> model.params['mlp_num_fan_out'] = 128
        >>> model.params['mlp_activation_func'] = 'relu'
        >>> model.guess_and_fill_missing_params(verbose=0)
        >>> model.build()
    """
    @classmethod
    def get_default_params(cls) -> ParamTable:
        """:return: model default parameters."""
        params = super().get_default_params(with_multi_layer_perceptron=True)
        # 'vocab_size' is the input width of both MLP towers (see build()).
        # NOTE(review): 419 presumably matches a tri-letter hashing vocabulary
        # used by the default preprocessor — confirm against the dataset.
        params.add(Param(name='vocab_size', value=419,
                         desc="Size of vocabulary."))
        return params
    @classmethod
    def get_default_preprocessor(
        cls,
        truncated_mode: str = 'pre',
        truncated_length_left: typing.Optional[int] = None,
        truncated_length_right: typing.Optional[int] = None,
        filter_mode: str = 'df',
        filter_low_freq: float = 1,
        filter_high_freq: float = float('inf'),
        remove_stop_words: bool = False,
        ngram_size: typing.Optional[int] = 3,
    ) -> BasePreprocessor:
        """
        Model default preprocessor.
        The preprocessor's transform should produce a correctly shaped data
        pack that can be used for training.
        :return: Default preprocessor.
        """
        # ngram_size=3 yields the tri-letter features DSSM hashes over.
        return preprocessors.BasicPreprocessor(
            truncated_mode=truncated_mode,
            truncated_length_left=truncated_length_left,
            truncated_length_right=truncated_length_right,
            filter_mode=filter_mode,
            filter_low_freq=filter_low_freq,
            filter_high_freq=filter_high_freq,
            remove_stop_words=remove_stop_words,
            ngram_size=ngram_size
        )
    @classmethod
    def get_default_padding_callback(cls):
        """:return: Default padding callback (none needed for DSSM)."""
        return None
    def build(self):
        """
        Build model structure.
        DSSM uses a Siamese architecture: one MLP tower per text side.
        """
        self.mlp_left = self._make_multi_layer_perceptron_layer(
            self._params['vocab_size']
        )
        self.mlp_right = self._make_multi_layer_perceptron_layer(
            self._params['vocab_size']
        )
        # Single-unit output layer over the similarity score.
        self.out = self._make_output_layer(1)
    def forward(self, inputs):
        """Forward."""
        # Process left & right input.
        input_left, input_right = inputs['ngram_left'], inputs['ngram_right']
        input_left = self.mlp_left(input_left)
        input_right = self.mlp_right(input_right)
        # Dot product with cosine similarity.
        x = F.cosine_similarity(input_left, input_right)
        out = self.out(x.unsqueeze(dim=1))
        return out
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/dssm.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/dssm.py",
"repo_id": "ContextualSP",
"token_count": 1366
}
| 246 |
import torch.nn as nn
class RNNDropout(nn.Dropout):
    """Dropout for RNN inputs: drops the same hidden units at every timestep."""

    def forward(self, sequences_batch):
        """
        Apply one dropout mask per (batch, hidden) pair, shared across the
        sequence (L) dimension of a (B, L, D) tensor.
        """
        batch, hidden = sequences_batch.shape[0], sequences_batch.shape[-1]
        # Build a BxD mask on the same device/dtype as the input, then let
        # functional dropout zero (and rescale) whole hidden dimensions.
        mask_source = sequences_batch.data.new_ones(batch, hidden)
        dropout_mask = nn.functional.dropout(mask_source, self.p,
                                             self.training, inplace=False)
        # Broadcast the mask over the sequence dimension.
        return dropout_mask.unsqueeze(1) * sequences_batch
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/modules/dropout.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/modules/dropout.py",
"repo_id": "ContextualSP",
"token_count": 310
}
| 247 |
import numpy as np
from .unit import Unit
class CharacterIndex(Unit):
    """
    CharacterIndex unit for the DIIN model.

    The input is a list of per-word character lists extracted from a text;
    the output is the character-index representation of that text. Unknown
    characters map to index 1 (OOV).

    :class:`NgramLetterUnit` and :class:`VocabularyUnit` are two essential
    prerequisites of :class:`CharacterIndex`.

    Examples:
        >>> input_ = [['#', 'a', '#'],['#', 'o', 'n', 'e', '#']]
        >>> character_index = CharacterIndex(
        ...     char_index={
        ...      '<PAD>': 0, '<OOV>': 1, 'a': 2, 'n': 3, 'e':4, '#':5})
        >>> index = character_index.transform(input_)
        >>> index
        [[5, 2, 5], [5, 1, 3, 4, 5]]
    """

    def __init__(
        self,
        char_index: dict,
    ):
        """
        Class initialization.

        :param char_index: character-index mapping generated by
            :class:'VocabularyUnit'.
        """
        self._char_index = char_index

    def transform(self, input_: list) -> list:
        """
        Map every character to its index; unseen characters become 1 (OOV).

        :param input_: list of per-word character lists generated by
            :class:'NgramLetterUnit'.
        :return: character index representation of a text.
        """
        return [[self._char_index.get(ch, 1) for ch in word]
                for word in input_]
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/character_index.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/character_index.py",
"repo_id": "ContextualSP",
"token_count": 683
}
| 248 |
import collections
import numpy as np
from .unit import Unit
class WordHashing(Unit):
    """
    Word-hashing unit for DSSM-based models.

    The input is a list of word sub-letter lists extracted from one
    document; the output is the word-hashing (bag-of-triletters)
    representation of that document.

    :class:`NgramLetterUnit` and :class:`VocabularyUnit` are two essential
    prerequisites of :class:`WordHashing`.

    Examples:
       >>> letters = [['#te', 'tes','est', 'st#'], ['oov']]
       >>> word_hashing = WordHashing(
       ...     term_index={
       ...      '_PAD': 0, 'OOV': 1, 'st#': 2, '#te': 3, 'est': 4, 'tes': 5
       ...     })
       >>> hashing = word_hashing.transform(letters)
       >>> hashing[0]
       [0.0, 0.0, 1.0, 1.0, 1.0, 1.0]
       >>> hashing[1]
       [0.0, 1.0, 0.0, 0.0, 0.0, 0.0]
    """

    def __init__(
        self,
        term_index: dict,
    ):
        """
        Class initialization.

        :param term_index: term-index mapping generated by
            :class:`VocabularyUnit`.
        """
        self._term_index = term_index

    def transform(self, input_: list) -> list:
        """
        Transform a list of tri-letters into the word-hashing layer.

        :param input_: list of `tri_letters` generated by
            :class:`NgramLetterUnit`.
        :return: Word hashing representation of `tri-letters`.
        """
        vocab_size = len(self._term_index)
        if any(isinstance(elem, list) for elem in input_):
            # CDSSM input shape: [[word1 ngram, ngram], [word2 ngram, ...], ...]
            # -> one count row per word; unknown tri-letters land on index 1.
            hashing = np.zeros((len(input_), vocab_size))
            for row, word in enumerate(input_):
                for letter, count in collections.Counter(word).items():
                    hashing[row, self._term_index.get(letter, 1)] = count
        else:
            # DSSM input shape: a flat [ngram, ngram, ...] list.
            hashing = np.zeros(vocab_size)
            for letter, count in collections.Counter(input_).items():
                hashing[self._term_index.get(letter, 1)] = count
        return hashing.tolist()
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/word_hashing.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/word_hashing.py",
"repo_id": "ContextualSP",
"token_count": 1095
}
| 249 |
[pytest]
markers =
cron: marks tests as cron (deselect with '-m "not cron"')
slow: marks tests as slow (deselect with '-m "not slow"')
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/pytest.ini/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/pytest.ini",
"repo_id": "ContextualSP",
"token_count": 58
}
| 250 |
import pytest
import matchzoo as mz
@pytest.fixture
def term_index():
    # Tiny term->index vocabulary; index 0 is reserved for padding ('_PAD').
    return {'G': 1, 'C': 2, 'D': 3, 'A': 4, '_PAD': 0}
def test_embedding(term_index):
    # Rank-style embedding file: each term maps to a 50-d vector.
    embed = mz.embedding.load_from_file(mz.datasets.embeddings.EMBED_RANK)
    matrix = embed.build_matrix(term_index)
    assert matrix.shape == (len(term_index), 50)
    # GloVe-format 10-d file needs mode='glove' to parse correctly.
    embed = mz.embedding.load_from_file(mz.datasets.embeddings.EMBED_10_GLOVE,
                                        mode='glove')
    matrix = embed.build_matrix(term_index)
    assert matrix.shape == (len(term_index), 10)
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tests/test_embedding.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tests/test_embedding.py",
"repo_id": "ContextualSP",
"token_count": 262
}
| 251 |
<jupyter_start><jupyter_code>import torch
import numpy as np
import pandas as pd
import matchzoo as mz
print('matchzoo version', mz.__version__)
# Ranking task trained with a pairwise hinge loss; nDCG@3/5 and MAP as metrics.
ranking_task = mz.tasks.Ranking(losses=mz.losses.RankHingeLoss())
ranking_task.metrics = [
    mz.metrics.NormalizedDiscountedCumulativeGain(k=3),
    mz.metrics.NormalizedDiscountedCumulativeGain(k=5),
    mz.metrics.MeanAveragePrecision()
]
print("`ranking_task` initialized with metrics", ranking_task.metrics)
print('data loading ...')
train_pack_raw = mz.datasets.wiki_qa.load_data('train', task=ranking_task)
dev_pack_raw = mz.datasets.wiki_qa.load_data('dev', task=ranking_task, filtered=True)
test_pack_raw = mz.datasets.wiki_qa.load_data('test', task=ranking_task, filtered=True)
print('data loaded as `train_pack_raw` `dev_pack_raw` `test_pack_raw`')
# Truncate queries to 10 tokens, documents to 100; drop terms seen < 2 times.
preprocessor = mz.preprocessors.BasicPreprocessor(
    truncated_length_left = 10,
    truncated_length_right = 100,
    filter_low_freq = 2
)
train_pack_processed = preprocessor.fit_transform(train_pack_raw)
dev_pack_processed = preprocessor.transform(dev_pack_raw)
test_pack_processed = preprocessor.transform(test_pack_raw)
preprocessor.context
# Build the embedding matrix from 100-d GloVe and L2-normalize each row.
glove_embedding = mz.datasets.embeddings.load_glove_embedding(dimension=100)
term_index = preprocessor.context['vocab_unit'].state['term_index']
embedding_matrix = glove_embedding.build_matrix(term_index)
l2_norm = np.sqrt((embedding_matrix * embedding_matrix).sum(axis=1))
embedding_matrix = embedding_matrix / l2_norm[:, np.newaxis]
# Pair mode draws (positive, negative) document pairs for the hinge loss.
trainset = mz.dataloader.Dataset(
    data_pack=train_pack_processed,
    mode='pair',
    num_dup=2,
    num_neg=1,
    batch_size=20,
    resample=True,
    sort=False
)
testset = mz.dataloader.Dataset(
    data_pack=test_pack_processed,
    batch_size=20
)
padding_callback = mz.models.DRMMTKS.get_default_padding_callback()
trainloader = mz.dataloader.DataLoader(
    dataset=trainset,
    stage='train',
    callback=padding_callback
)
testloader = mz.dataloader.DataLoader(
    dataset=testset,
    stage='dev',
    callback=padding_callback
)
# DRMM-TKS: top-k matching scores fed through an MLP scorer.
model = mz.models.DRMMTKS()
model.params['task'] = ranking_task
model.params['embedding'] = embedding_matrix
model.params['mask_value'] = 0
model.params['top_k'] = 10
model.params['mlp_activation_func'] = 'tanh'
model.build()
print(model)
print('Trainable params: ', sum(p.numel() for p in model.parameters() if p.requires_grad))
optimizer = torch.optim.Adadelta(model.parameters())
trainer = mz.trainers.Trainer(
    model=model,
    optimizer=optimizer,
    trainloader=trainloader,
    validloader=testloader,
    validate_interval=None,
    epochs=10
)
trainer.run()<jupyter_output><empty_output>
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tutorials/ranking/drmmtks.ipynb/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tutorials/ranking/drmmtks.ipynb",
"repo_id": "ContextualSP",
"token_count": 1026
}
| 252 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
Mainly borrowed from `allennlp.data.fields.production_rule_field.py` to support tree-level copy
Author: Qian Liu
"""
from typing import Dict, List, Optional, NamedTuple
import torch
from overrides import overrides
from allennlp.data.fields.field import Field
from allennlp.data.vocabulary import Vocabulary
class CopyProductionRule(NamedTuple):
    # Grammar production string, e.g. "S -> [NP, VP]".
    rule: str
    # True when the rule comes from the dataset-wide (global) grammar.
    is_global_rule: bool
    # True when the rule was produced by tree-level copy.
    is_copy_rule: bool
    # Vocabulary id tensor of shape (1,); only present for global rules.
    rule_id: Optional[torch.LongTensor] = None
    # Left-hand-side non-terminal of the rule, if provided.
    nonterminal: Optional[str] = None
# This is just here for backward compatibility.
ProductionRuleArray = CopyProductionRule
# mypy doesn't like that we're using a crazy data type - the data type we use here is _supposed_ to
# be in the bounds of DataArray, but ProductionRule definitely isn't. TODO(mattg): maybe we
# should find a better way to loosen those bounds, or let people extend them. E.g., we could have
# DataArray be a class, and let people subclass it, or something.
class CopyProductionRuleField(Field[CopyProductionRule]):  # type: ignore
    """
    This ``Field`` represents a production rule from a grammar, like "S -> [NP, VP]", "N -> John",
    or "<b,c> -> [<a,<b,c>>, a]".
    We assume a few things about how these rules are formatted:
    - There is a left-hand side (LHS) and a right-hand side (RHS), where the LHS is always a
      non-terminal, and the RHS is either a terminal, a non-terminal, or a sequence of
      terminals and/or non-terminals.
    - The LHS and the RHS are joined by " -> ", and this sequence of characters appears nowhere
      else in the rule.
    - Non-terminal sequences in the RHS are formatted as "[NT1, NT2, ...]".
    - Some rules come from a global grammar used for a whole dataset, while other rules are
      specific to a particular ``Instance``.
    We don't make use of most of these assumptions in this class, but the code that consumes this
    ``Field`` relies heavily on them in some places.
    If the given rule is in the global grammar, we treat the rule as a vocabulary item that will
    get an index and (in the model) an embedding.  If the rule is not in the global grammar, we do
    not create a vocabulary item from the rule, and don't produce a tensor for the rule - we assume
    the model will handle representing this rule in some other way.
    Because we represent global grammar rules and instance-specific rules differently, this
    ``Field`` does not lend itself well to batching its arrays, even in a sequence for a single
    training instance.  A model using this field will have to manually batch together rule
    representations after splitting apart the global rules from the ``Instance`` rules.
    In a model, this will get represented as a ``ProductionRule``, which is defined above.
    This is a namedtuple of ``(rule_string, is_global_rule, [rule_id], nonterminal)``, where the
    ``rule_id`` ``Tensor``, if present, will have shape ``(1,)``.  We don't do any batching of the
    ``Tensors``, so this gets passed to ``Model.forward()`` as a ``List[ProductionRule]``.  We
    pass along the rule string because there isn't another way to recover it for instance-specific
    rules that do not make it into the vocabulary.
    Parameters
    ----------
    rule : ``str``
        The production rule, formatted as described above.  If this field is just padding, ``rule``
        will be the empty string.
    is_global_rule : ``bool``
        Whether this rule comes from the global grammar or is an instance-specific production rule.
    vocab_namespace : ``str``, optional (default="rule_labels")
        The vocabulary namespace to use for the global production rules.  We use "rule_labels" by
        default, because we typically do not want padding and OOV tokens for these, and ending the
        namespace with "labels" means we don't get padding and OOV tokens.
    nonterminal : ``str``, optional, default = None
        The left hand side of the rule. Sometimes having this as separate part of the ``ProductionRule``
        can deduplicate work.
    """
    def __init__(self,
                 rule: str,
                 is_global_rule: bool,
                 is_copy_rule: bool,
                 vocab_namespace: str = 'rule_labels',
                 nonterminal: str = None) -> None:
        self.rule = rule
        self.nonterminal = nonterminal
        self.is_global_rule = is_global_rule
        self.is_copy_rule = is_copy_rule
        self._vocab_namespace = vocab_namespace
        self._rule_id: int = None
    @overrides
    def count_vocab_items(self, counter: Dict[str, Dict[str, int]]):
        # Only global rules participate in the vocabulary.
        if self.is_global_rule:
            counter[self._vocab_namespace][self.rule] += 1
    @overrides
    def index(self, vocab: Vocabulary):
        if self.is_global_rule and self._rule_id is None:
            self._rule_id = vocab.get_token_index(self.rule, self._vocab_namespace)
    @overrides
    def get_padding_lengths(self) -> Dict[str, int]:
        # pylint: disable=no-self-use
        return {}
    @overrides
    def as_tensor(self, padding_lengths: Dict[str, int]) -> CopyProductionRule:
        # pylint: disable=unused-argument
        if self.is_global_rule:
            tensor = torch.LongTensor([self._rule_id])
        else:
            tensor = None
        return CopyProductionRule(self.rule, self.is_global_rule, self.is_copy_rule, tensor, self.nonterminal)
    @overrides
    def empty_field(self):  # pylint: disable=no-self-use
        # This _does_ get called, because we don't want to bother with modifying the ListField to
        # ignore padding for these.  We just make sure the rule is the empty string, which the
        # model will use to know that this rule is just padding.
        return CopyProductionRuleField(rule='', is_global_rule=False, is_copy_rule=False)
    @overrides
    def batch_tensors(self, tensor_list: List[CopyProductionRule]) -> List[CopyProductionRule]:  # type: ignore
        # pylint: disable=no-self-use
        return tensor_list
    def __str__(self) -> str:
        # BUGFIX: the original concatenation produced "...False)in namespace"
        # (missing space between fragments) and ended with a stray apostrophe
        # ("...'.'"); both are fixed here.
        return f"ProductionRuleField with rule: {self.rule} (is_global_rule: " \
               f"{self.is_global_rule}, is_copy_rule: {self.is_copy_rule}) " \
               f"in namespace: '{self._vocab_namespace}'."
|
ContextualSP/semantic_parsing_in_context/context/copy_production_rule_field.py/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/context/copy_production_rule_field.py",
"repo_id": "ContextualSP",
"token_count": 2236
}
| 253 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
from typing import Dict, List, Tuple
import torch
import statistics
from allennlp.nn import util
from allennlp.state_machines.constrained_beam_search import ConstrainedBeamSearch
from allennlp.state_machines.states import State
from allennlp.state_machines.trainers.decoder_trainer import DecoderTrainer
from allennlp.state_machines.transition_functions import TransitionFunction
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class MaximumMarginalLikelihood(DecoderTrainer[Tuple[torch.Tensor, torch.Tensor]]):
    """
    This class trains a decoder by maximizing the marginal likelihood of the targets.  That is,
    during training, we are given a `set` of acceptable or possible target sequences, and we
    optimize the `sum` of the probability the model assigns to each item in the set.  This allows
    the model to distribute its probability mass over the set however it chooses, without forcing
    `all` of the given target sequences to have high probability.  This is helpful, for example, if
    you have good reason to expect that the correct target sequence is in the set, but aren't sure
    `which` of the sequences is actually correct.
    This implementation of maximum marginal likelihood requires the model you use to be `locally
    normalized`; that is, at each decoding timestep, we assume that the model creates a normalized
    probability distribution over actions.  This assumption is necessary, because we do no explicit
    normalization in our loss function, we just sum the probabilities assigned to all correct
    target sequences, relying on the local normalization at each time step to push probability mass
    from bad actions to good ones.
    Parameters
    ----------
    beam_size : ``int``, optional (default=None)
        We can optionally run a constrained beam search over the provided targets during decoding.
        This narrows the set of transition sequences that are marginalized over in the loss
        function, keeping only the top ``beam_size`` sequences according to the model.  If this is
        ``None``, we will keep all of the provided sequences in the loss computation.
    """
    def __init__(self, beam_size: int = None, re_weight: bool = False, loss_mask: int = 6) -> None:
        self._beam_size = beam_size
        # If set, each turn's loss is re-weighted by 1 / (number of batch
        # elements that actually contain that turn).
        self._re_weight = re_weight
        # mask the loss to not back-propagate
        # (turns with index >= self._loss_mask contribute no loss)
        self._loss_mask = loss_mask
    def decode(self,
               initial_state: State,
               transition_function: TransitionFunction,
               supervision: Tuple[torch.Tensor, torch.Tensor]) -> Dict[str, torch.Tensor]:
        targets, target_mask = supervision
        # batch_size x inter_size x action_size x index_size(no use)
        assert len(targets.size()) == 4
        # -> batch_size * inter_size x action_size
        batch_size, inter_size, _, _ = targets.size()
        # TODO: we must keep the shape because the loss_mask
        targets = targets.reshape(batch_size * inter_size, -1)
        target_mask = target_mask.reshape(batch_size * inter_size, -1)
        # A turn is real (non-padding) iff its target mask has any nonzero entry.
        inter_mask = target_mask.sum(dim=1).ne(0)
        # un squeeze beam search dimension
        targets = targets.unsqueeze(dim=1)
        target_mask = target_mask.unsqueeze(dim=1)
        beam_search = ConstrainedBeamSearch(self._beam_size, targets, target_mask)
        finished_states: Dict[int, List[State]] = beam_search.search(initial_state, transition_function)
        # Per turn position: how many batch elements actually have that turn.
        inter_count = inter_mask.view(batch_size, inter_size).sum(dim=0).float()
        if 0 not in inter_count:
            inter_ratio = 1.0 / inter_count
        else:
            # Avoid division by zero when some turn position is empty everywhere.
            inter_ratio = torch.ones_like(inter_count)
        loss = 0
        for iter_ind, instance_states in finished_states.items():
            scores = [state.score[0].view(-1) for state in instance_states]
            lens = [len(state.action_history[0]) for state in instance_states]
            if not len(lens):
                continue
            # the i-round of an interaction, starting from 0
            cur_inter = iter_ind % inter_size
            if self._re_weight:
                loss_coefficient = inter_ratio[cur_inter]
            else:
                loss_coefficient = 1.0
            # Turns at or beyond the loss mask are excluded from training.
            if self._loss_mask <= cur_inter:
                continue
            # Marginal log-likelihood over finished sequences, normalized by
            # the mean decoded sequence length.
            cur_loss = - util.logsumexp(torch.cat(scores)) / statistics.mean(lens)
            loss += loss_coefficient * cur_loss
        if self._re_weight:
            return {'loss': loss / len(inter_count)}
        elif self._loss_mask < inter_size:
            valid_counts = inter_count[:self._loss_mask].sum()
            return {'loss': loss / valid_counts}
        else:
            return {'loss': loss / len(finished_states)}
|
ContextualSP/semantic_parsing_in_context/models/decode_trainer.py/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/models/decode_trainer.py",
"repo_id": "ContextualSP",
"token_count": 1766
}
| 254 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from scripts.eval.evaluation_sqa import Evaluator, build_valid_col_units, rebuild_sql_val, rebuild_sql_col, \
build_foreign_key_map_from_json
from scripts.eval.process_sql import Schema, get_schema, get_sql
_schemas = {}
kmaps = None
def evaluate(gold, predict, db_name, db_dir, table) -> int:
    """Score a predicted SQL query against the gold query with exact-set match.

    Returns 1 when the rebuilt predicted SQL exactly matches the gold SQL and
    0 otherwise, including the case where the prediction cannot be parsed.
    """
    global kmaps
    evaluator = Evaluator()
    # Foreign-key maps are shared across all calls; build them only once.
    if kmaps is None:
        kmaps = build_foreign_key_map_from_json(table)
    # Parse (and cache) the schema of the target database.
    schema = _schemas.get(db_name)
    if schema is None:
        db = os.path.join(db_dir, db_name, db_name + ".sqlite")
        schema = Schema(get_schema(db))
        _schemas[db_name] = schema
    g_sql = get_sql(schema, gold)
    try:
        p_sql = get_sql(schema, predict)
    except Exception:
        # An unparsable prediction counts as a miss.
        return 0
    # Rebuild both queries so values and columns are normalised before comparison.
    kmap = kmaps[db_name]
    g_valid_col_units = build_valid_col_units(g_sql['from']['table_units'], schema)
    g_sql = rebuild_sql_col(g_valid_col_units, rebuild_sql_val(g_sql), kmap)
    p_valid_col_units = build_valid_col_units(p_sql['from']['table_units'], schema)
    p_sql = rebuild_sql_col(p_valid_col_units, rebuild_sql_val(p_sql), kmap)
    return evaluator.eval_exact_match(p_sql, g_sql)
|
ContextualSP/semantic_parsing_in_context/scripts/sparc_evaluate.py/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/scripts/sparc_evaluate.py",
"repo_id": "ContextualSP",
"token_count": 577
}
| 255 |
"""
Utility functions for reading the standardised text2sql datasets presented in
`"Improving Text to SQL Evaluation Methodology" <https://arxiv.org/abs/1806.09029>`_
"""
import json
import os
import sqlite3
from collections import defaultdict
from typing import List, Dict, Optional, Any
from semparse.sql.process_sql import get_tables_with_alias, parse_sql
class TableColumn:
    """A single schema column: display text, SQL type and key metadata."""

    def __init__(self,
                 name: str,
                 text: str,
                 column_type: str,
                 is_primary_key: bool,
                 foreign_key: Optional[str],
                 lemma: Optional[str]):
        # Store everything verbatim; foreign_key may be patched in later by
        # the schema reader once all columns exist.
        (self.name, self.text, self.column_type,
         self.is_primary_key, self.foreign_key, self.lemma) = (
            name, text, column_type, is_primary_key, foreign_key, lemma)
class Table:
    """A database table of the schema: display text, its columns and a lemma."""

    def __init__(self,
                 name: str,
                 text: str,
                 columns: List[TableColumn],
                 lemma: Optional[str]):
        # Bundle all table metadata on the instance in one assignment.
        (self.name, self.text, self.columns, self.lemma) = (
            name, text, columns, lemma)
def read_dataset_schema(schema_path: str, stanza_model=None) -> Dict[str, List[Table]]:
    """Load a Spider-style ``tables.json`` schema file into Table/TableColumn objects.

    Args:
        schema_path: path to the schema JSON file.
        stanza_model: optional stanza pipeline used to lemmatise table and
            column names; when None the raw names are reused as lemmas.

    Returns:
        Mapping of db_id to {table name -> Table}.
        NOTE(review): the declared return type says List[Table], but the code
        builds a Dict[str, Table] per database — confirm which is intended.
    """
    schemas: Dict[str, Dict[str, Table]] = defaultdict(dict)
    dbs_json_blob = json.load(open(schema_path, "r", encoding='utf-8'))
    for db in dbs_json_blob:
        db_id = db['db_id']
        column_id_to_table = {}
        column_id_to_column = {}
        concate_columns = [c[-1] for c in db['column_names']]
        concate_tables = [c for c in db['table_names']]
        #load stanza model
        if stanza_model is not None:
            # One stanza call for all column names: '\n\n' makes each name its
            # own sentence; spaces are masked with 'none' beforehand.
            lemma_columns = stanza_model('\n\n'.join(concate_columns).replace(' ','none'))
            lemma_columns_collect = []
            for sent in lemma_columns.sentences:
                tmp = []
                for word in sent.words:
                    # Prefer the lemma; fall back to the raw token text.
                    if word.lemma != None:
                        tmp.append(word.lemma)
                    elif word.text==' ':
                        tmp.append('none')
                    else:
                        tmp.append(word.text)
                lemma_columns_collect.append(' '.join(tmp))
            # Same treatment for the table names, keyed by original table text.
            lemma_tables = stanza_model('\n\n'.join(concate_tables).replace(' ','none'))
            lemma_tables_collect = {}
            for t,sent in zip(concate_tables, lemma_tables.sentences):
                tmp = []
                for word in sent.words:
                    if word.lemma != None:
                        tmp.append(word.lemma)
                    elif word.text == ' ':
                        tmp.append('none')
                    else:
                        tmp.append(word.text)
                lemma_tables_collect[t]=' '.join(tmp)
        else:
            # No lemmatiser available: use the names themselves as lemmas.
            lemma_columns_collect = concate_columns
            lemma_tables_collect = {t:t for t in concate_tables}
        for i, (column, text, column_type) in enumerate(zip(db['column_names_original'], db['column_names'], db['column_types'])):
            table_id, column_name = column
            _, column_text = text
            table_name = db['table_names_original'][table_id]
            # Create the Table entry lazily on the first column seen for it.
            if table_name not in schemas[db_id]:
                table_text = db['table_names'][table_id]
                table_lemma = lemma_tables_collect[table_text]
                schemas[db_id][table_name] = Table(table_name, table_text, [], table_lemma)
            # The '*' pseudo-column belongs to no table and is skipped.
            if column_name == "*":
                continue
            is_primary_key = i in db['primary_keys']
            # foreign_key is left None here and linked in the pass below.
            table_column = TableColumn(column_name.lower(), column_text, column_type, is_primary_key, None, lemma_columns_collect[i])
            schemas[db_id][table_name].columns.append(table_column)
            column_id_to_table[i] = table_name
            column_id_to_column[i] = table_column
        # Second pass: record 'table:column' targets for each foreign key pair.
        for (c1, c2) in db['foreign_keys']:
            foreign_key = column_id_to_table[c2] + ':' + column_id_to_column[c2].name
            column_id_to_column[c1].foreign_key = foreign_key
    return {**schemas}
def read_dataset_values(db_id: str, dataset_path: str, tables: List[str]):
    """Load up to 5000 rows from every table of the given sqlite database.

    Args:
        db_id: database identifier; the file is expected at
            ``<dataset_path>/<db_id>/<db_id>.sqlite``.
        dataset_path: root directory containing one folder per database.
        tables: objects exposing a ``.name`` attribute (e.g. ``Table``).
            NOTE(review): despite the ``List[str]`` annotation, elements must
            expose ``.name`` — confirm against callers.

    Returns:
        Dict mapping each table object to its list of row tuples.

    Raises:
        Exception: if the database file cannot be opened.
    """
    db = os.path.join(dataset_path, db_id, db_id + ".sqlite")
    try:
        conn = sqlite3.connect(db)
    except Exception as e:
        raise Exception(f"Can't connect to SQL: {e} in path {db}")
    conn.text_factory = str
    cursor = conn.cursor()
    values = {}
    for table in tables:
        # NOTE: the table name comes from the schema file, not user input;
        # it is interpolated directly because sqlite placeholders cannot be
        # used for identifiers.
        query = f"SELECT * FROM {table.name} LIMIT 5000"
        try:
            values[table] = cursor.execute(query).fetchall()
        except Exception:
            # Some databases contain non-UTF-8 text; retry decoding as latin1.
            # (The original used a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            conn.text_factory = lambda x: str(x, 'latin1')
            cursor = conn.cursor()
            values[table] = cursor.execute(query).fetchall()
    # Close the connection explicitly; the original leaked it.
    conn.close()
    return values
def ent_key_to_name(key):
    """Convert an entity key to a display name.

    'table:<t>' and any other two-part key yield the second part; a column
    key 'column:<_>:<table>:<col>' yields '<table>@<col>'.
    """
    parts = key.split(':')
    if parts[0] == 'column':
        # Column keys carry exactly four ':'-separated fields.
        _, _, table_name, column_name = parts
        return f'{table_name}@{column_name}'
    return parts[1]
def fix_number_value(ex):
    """
    There is something weird in the dataset files - the `query_toks_no_value` field anonymizes all values,
    which is good since the evaluator doesn't check for the values. But it also anonymizes numbers that
    should not be anonymized: e.g. LIMIT 3 becomes LIMIT 'value', while the evaluator fails if it is not a number.
    """

    def split_and_keep(s, sep):
        # Split `s` on `sep` while keeping the separator as a token of its own.
        if not s: return [''] # consistent with string.split()

        # Find replacement character that is not used in string
        # i.e. just use the highest available character plus one
        # Note: This fails if ord(max(s)) = 0x10FFFF (ValueError)
        p = chr(ord(max(s)) + 1)
        return s.replace(sep, p + sep + p).split(p)

    # input is tokenized in different ways... so first try to make splits equal
    query_toks = ex['query_toks']
    ex['query_toks'] = []
    for q in query_toks:
        ex['query_toks'] += split_and_keep(q, '.')

    # Walk both token lists in lockstep. A single 'value' placeholder may
    # cover several raw tokens, so scan ahead until the two streams realign.
    i_val, i_no_val = 0, 0
    while i_val < len(ex['query_toks']) and i_no_val < len(ex['query_toks_no_value']):
        if ex['query_toks_no_value'][i_no_val] != 'value':
            i_val += 1
            i_no_val += 1
            continue

        # Extend the raw-token span until the next tokens match up again.
        i_val_end = i_val
        while i_val + 1 < len(ex['query_toks']) and \
                i_no_val + 1 < len(ex['query_toks_no_value']) and \
                ex['query_toks'][i_val_end + 1].lower() != ex['query_toks_no_value'][i_no_val + 1].lower():
            i_val_end += 1

        # Restore small 'LIMIT n' numbers that were wrongly anonymized.
        if i_val == i_val_end and ex['query_toks'][i_val] in ["1", "2", "3", "4", "5"] and ex['query_toks'][i_val - 1].lower() == "limit":
            ex['query_toks_no_value'][i_no_val] = ex['query_toks'][i_val]
        i_val = i_val_end

        i_val += 1
        i_no_val += 1

    return ex
# Process-wide cache of (schemas, db_names, tables) parsed from the tables
# file; populated lazily by get_schemas_from_json inside disambiguate_items.
_schemas_cache = None
def disambiguate_items(db_id: str, query_toks: List[str], tables_file: str, allow_aliases: bool) -> List[str]:
    """Rewrite query tokens so every column reference is unambiguous.

    Each bare column name is replaced by a fully qualified form so later
    stages do not have to guess which table a column belongs to. Parsed SQL
    handling is based on supermodel.gensql from syntaxsql.

    Args:
        db_id: database identifier used to look up its schema.
        query_toks: tokenised SQL query.
        tables_file: path to the tables JSON file with all schemas.
        allow_aliases: keep table aliases (t1, t2, ...) when True; otherwise
            strip aliasing tokens entirely.

    Returns:
        The disambiguated token list.
    """

    class Schema:
        """Maps table & column names to the unique ids used by the parser."""

        def __init__(self, schema, table):
            self._schema = schema
            self._table = table
            self._idMap = self._map(self._schema, self._table)

        @property
        def schema(self):
            return self._schema

        @property
        def idMap(self):
            return self._idMap

        def _map(self, schema, table):
            column_names_original = table['column_names_original']
            table_names_original = table['table_names_original']
            # BUG FIX: the map used to be created only inside the
            # `tab_id == -1` branch, so a schema whose first column is not
            # '*' crashed with NameError (and a later '*' entry wiped the
            # already-collected mappings).
            idMap = {}
            for i, (tab_id, col) in enumerate(column_names_original):
                if tab_id == -1:
                    # The special '*' column is stored under its own key.
                    idMap['*'] = i
                else:
                    key = table_names_original[tab_id].lower()
                    val = col.lower().replace(' ', '_')
                    idMap[key + "." + val] = i
            for i, tab in enumerate(table_names_original):
                idMap[tab.lower()] = i
            return idMap

    def get_schemas_from_json(fpath):
        # Parse the tables file once per process and reuse it afterwards.
        global _schemas_cache
        if _schemas_cache is not None:
            return _schemas_cache
        with open(fpath, encoding='utf-8') as f:
            data = json.load(f)
        db_names = [db['db_id'] for db in data]
        tables = {}
        schemas = {}
        for db in data:
            db_id = db['db_id']
            schema = {}  # {'table': [col.lower, ..., ]} * -> __all__
            column_names_original = db['column_names_original'] if 'column_names_original' in db else db['column_names']
            table_names_original = db['table_names_original'] if 'table_names_original' in db else db['table_names']
            tables[db_id] = {'column_names_original': column_names_original,
                             'table_names_original': table_names_original}
            for i, tabn in enumerate(table_names_original):
                table = str(tabn.lower())
                cols = [str(col.lower().replace(' ', '_')) for td, col in column_names_original if td == i]
                schema[table] = cols
            schemas[db_id] = schema
        _schemas_cache = schemas, db_names, tables
        return _schemas_cache

    schemas, db_names, tables = get_schemas_from_json(tables_file)
    schema = Schema(schemas[db_id], tables[db_id])
    # Normalise tokenisation quirks: quote value placeholders, merge split
    # comparison operators, and glue `table . column` triples back together.
    fixed_toks = []
    i = 0
    while i < len(query_toks):
        tok = query_toks[i]
        if tok == 'value' or tok == "'value'":
            # TODO: value should always be between '/" (remove first if clause)
            new_tok = f'"{tok}"'
        elif tok in ['!', '<', '>'] and query_toks[i + 1] == '=':
            new_tok = tok + '='
            i += 1
        elif i + 1 < len(query_toks) and query_toks[i + 1] == '.':
            new_tok = ''.join(query_toks[i:i + 3])
            i += 2
        else:
            new_tok = tok
        fixed_toks.append(new_tok)
        i += 1
    toks = fixed_toks
    tables_with_alias = get_tables_with_alias(schema.schema, toks)
    _, sql, mapped_entities = parse_sql(toks, 0, tables_with_alias, schema, mapped_entities_fn=lambda: [])
    # Replace each mapped entity token with its fully qualified name.
    for i, new_name in mapped_entities:
        curr_tok = toks[i]
        if '.' in curr_tok and allow_aliases:
            parts = curr_tok.split('.')
            assert (len(parts) == 2)
            toks[i] = parts[0] + '.' + new_name
        else:
            toks[i] = new_name
    if not allow_aliases:
        # Drop the aliasing machinery altogether when aliases are not wanted.
        toks = [tok for tok in toks if tok not in ['as', 't1', 't2', 't3', 't4', 't5', 't6', 't7', 't8', 't9', 't10']]
    toks = [f'\'value\'' if tok == '"value"' else tok for tok in toks]
    return toks
def remove_on(query):
    """Strip every 4-token 'ON <lhs> <op> <rhs>' clause from a space-tokenised SQL string."""
    tokens = query.split()
    kept = []
    i = 0
    while i < len(tokens):
        if tokens[i] == 'on':
            # Skip 'on' plus the three tokens of the join condition.
            i += 4
        else:
            kept.append(tokens[i])
            i += 1
    return ' '.join(kept)
def read_dataset_values_from_json(db_id: str, db_content_dict: Dict[str, Any], tables: List[str]):
    """Fetch cached cell values for each table of ``db_id`` from a preloaded content dict.

    Returns a dict keyed by the table objects themselves, mirroring
    ``read_dataset_values`` but without touching sqlite.
    """
    table_records = db_content_dict[db_id]['tables']
    return {table: table_records[table.name]['cell'] for table in tables}
def extract_tree_style(sent):
    """
    sent: List
    """
    # NOTE(review): unfinished stub — `rnt` is never populated or returned,
    # so the function currently always returns None. Confirm intent before use.
    rnt = []
# Ad-hoc smoke test: lemmatise a sample question with a stanza English
# pipeline (requires the third-party `stanza` package and a model download).
if __name__ == '__main__':
    import stanza
    stanza_model = stanza.Pipeline('en')
    doc = stanza_model("what is the name of the breed with the most dogs ?")
    # Flatten the per-sentence lemmas into a single list.
    word=[word.lemma for sent in doc.sentences for word in sent.words]
    rnt = []
|
ContextualSP/unified_parser_text_to_sql/semparse/sql/spider_utils.py/0
|
{
"file_path": "ContextualSP/unified_parser_text_to_sql/semparse/sql/spider_utils.py",
"repo_id": "ContextualSP",
"token_count": 5863
}
| 256 |
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class LinearSuper(nn.Linear):
    """nn.Linear whose weight/bias can be sliced to a sampled sub-network.

    The layer is allocated at the largest ("super") size; `set_sample_config`
    picks the currently active in/out dimensions and `forward` uses only the
    corresponding top-left slice of the parameters.
    """

    def __init__(self, super_in_dim, super_out_dim, bias=True, uniform_=None, non_linear='linear', scale=False):
        super().__init__(super_in_dim, super_out_dim, bias=bias)

        # super_in_dim and super_out_dim indicate the largest network!
        self.super_in_dim = super_in_dim
        self.super_out_dim = super_out_dim

        # input_dim and output_dim indicate the current sampled size
        self.sample_in_dim = None
        self.sample_out_dim = None

        # Cache of the currently sampled weight/bias slices.
        self.samples = {}

        self.scale = scale
        self._reset_parameters(bias, uniform_, non_linear)
        self.profiling = False

    def profile(self, mode=True):
        # In profiling mode, parameters are re-sliced on every forward pass.
        self.profiling = mode

    def sample_parameters(self, resample=False):
        """Return the sampled parameter dict, re-slicing if profiling/resample."""
        if self.profiling or resample:
            return self._sample_parameters()
        return self.samples

    def _reset_parameters(self, bias, uniform_, non_linear):
        """Initialise the super weight (xavier by default) and zero the bias."""
        nn.init.xavier_uniform_(self.weight) if uniform_ is None else uniform_(
            self.weight, non_linear=non_linear)
        if bias:
            nn.init.constant_(self.bias, 0.)

    def set_sample_config(self, sample_in_dim, sample_out_dim):
        """Select the active sub-network dimensions and slice the parameters."""
        self.sample_in_dim = sample_in_dim
        self.sample_out_dim = sample_out_dim

        self._sample_parameters()

    def _sample_parameters(self):
        # Slice weight to (sample_out_dim, sample_in_dim); bias to sample_out_dim.
        self.samples['weight'] = sample_weight(self.weight, self.sample_in_dim, self.sample_out_dim)
        self.samples['bias'] = self.bias
        # Optional output rescaling compensating for the smaller sampled width.
        self.sample_scale = self.super_out_dim/self.sample_out_dim
        if self.bias is not None:
            self.samples['bias'] = sample_bias(self.bias, self.sample_out_dim)
        return self.samples

    def forward(self, x):
        self.sample_parameters()
        return F.linear(x, self.samples['weight'], self.samples['bias']) * (self.sample_scale if self.scale else 1)

    def calc_sampled_param_num(self):
        """Number of parameters in the currently sampled slice."""
        assert 'weight' in self.samples.keys()
        weight_numel = self.samples['weight'].numel()

        if self.samples['bias'] is not None:
            bias_numel = self.samples['bias'].numel()
        else:
            bias_numel = 0

        return weight_numel + bias_numel

    def get_complexity(self, sequence_length):
        """FLOPs for applying the sampled layer to `sequence_length` tokens."""
        total_flops = 0
        total_flops += sequence_length * np.prod(self.samples['weight'].size())
        return total_flops
def sample_weight(weight, sample_in_dim, sample_out_dim):
    """Return the top-left (sample_out_dim x sample_in_dim) slice of the super weight."""
    # Equivalent to slicing columns first and rows second.
    return weight[:sample_out_dim, :sample_in_dim]
def sample_bias(bias, sample_out_dim):
    """Return the first ``sample_out_dim`` entries of the super bias."""
    return bias[:sample_out_dim]
|
Cream/AutoFormer/model/module/Linear_super.py/0
|
{
"file_path": "Cream/AutoFormer/model/module/Linear_super.py",
"repo_id": "Cream",
"token_count": 1177
}
| 257 |
from .base import BaseFileHandler
from .json_handler import JsonHandler
from .pickle_handler import PickleHandler
from .yaml_handler import YamlHandler
# Explicit public API: the abstract base plus the three concrete handlers
# imported above.
__all__ = ['BaseFileHandler', 'JsonHandler', 'PickleHandler', 'YamlHandler']
|
Cream/CDARTS/CDARTS_detection/mmcv/fileio/handlers/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/fileio/handlers/__init__.py",
"repo_id": "Cream",
"token_count": 66
}
| 258 |
import torch
from torch.nn.parallel._functions import _get_stream
def scatter(input, devices, streams=None):
    """Scatters tensor across multiple GPUs.
    """
    # `input` may be a (nested) list of tensors or a single tensor.
    if streams is None:
        streams = [None] * len(devices)
    if isinstance(input, list):
        # Split the list into contiguous chunks, one per device, and recurse
        # on every element with the device/stream of its chunk.
        chunk_size = (len(input) - 1) // len(devices) + 1
        outputs = [
            scatter(input[i], [devices[i // chunk_size]],
                    [streams[i // chunk_size]]) for i in range(len(input))
        ]
        return outputs
    elif isinstance(input, torch.Tensor):
        output = input.contiguous()
        # TODO: copy to a pinned buffer first (if copying from CPU)
        # Empty tensors are copied on the default stream (stream=None).
        stream = streams[0] if output.numel() > 0 else None
        with torch.cuda.device(devices[0]), torch.cuda.stream(stream):
            output = output.cuda(devices[0], non_blocking=True)
        return output
    else:
        raise Exception('Unknown type {}.'.format(type(input)))
def synchronize_stream(output, devices, streams):
    # Make the main CUDA stream of each device wait for the background copy
    # stream that produced `output`, so later kernels see the copied data.
    if isinstance(output, list):
        # Outputs were scattered in equal chunks per device; synchronise each
        # chunk with the stream of the device it was copied to.
        chunk_size = len(output) // len(devices)
        for i in range(len(devices)):
            for j in range(chunk_size):
                synchronize_stream(output[i * chunk_size + j], [devices[i]],
                                   [streams[i]])
    elif isinstance(output, torch.Tensor):
        if output.numel() != 0:
            with torch.cuda.device(devices[0]):
                main_stream = torch.cuda.current_stream()
                main_stream.wait_stream(streams[0])
                # Keep the tensor's memory alive for the main stream's users.
                output.record_stream(main_stream)
    else:
        raise Exception('Unknown type {}.'.format(type(output)))
def get_input_device(input):
    """Return the CUDA device id of the first GPU tensor found in a (possibly
    nested) list, or -1 if everything lives on the CPU."""
    if isinstance(input, torch.Tensor):
        return input.get_device() if input.is_cuda else -1
    if isinstance(input, list):
        # Depth-first search for the first element on a GPU.
        for element in input:
            device = get_input_device(element)
            if device != -1:
                return device
        return -1
    raise Exception('Unknown type {}.'.format(type(input)))
class Scatter(object):
    """Scatter `input` to the target GPUs, using background copy streams when
    the input still lives on the CPU."""

    @staticmethod
    def forward(target_gpus, input):
        input_device = get_input_device(input)
        streams = None
        if input_device == -1:
            # Perform CPU to GPU copies in a background stream
            streams = [_get_stream(device) for device in target_gpus]
        outputs = scatter(input, target_gpus, streams)
        # Synchronize with the copy stream
        if streams is not None:
            synchronize_stream(outputs, target_gpus, streams)
        return tuple(outputs)
|
Cream/CDARTS/CDARTS_detection/mmcv/parallel/_functions.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/parallel/_functions.py",
"repo_id": "Cream",
"token_count": 1125
}
| 259 |
from __future__ import print_function
import logging
import os
import os.path as osp
import time
from datetime import datetime
from threading import Thread
import requests
from six.moves.queue import Empty, Queue
from ...utils import get_host_info, master_only
from .base import LoggerHook
class PaviClient(object):
    """HTTP client that registers a training run with a PAVI server and posts
    log dicts to it from a background daemon thread."""

    def __init__(self, url, username=None, password=None, instance_id=None):
        self.url = url
        # Credentials fall back to environment variables when not passed in.
        self.username = self._get_env_var(username, 'PAVI_USERNAME')
        self.password = self._get_env_var(password, 'PAVI_PASSWORD')
        self.instance_id = instance_id
        # Created lazily on a successful connect().
        self.log_queue = None
        self.logger = None

    def _get_env_var(self, var, env_var):
        """Return `var` as str, falling back to the `env_var` environment variable."""
        if var is not None:
            return str(var)

        var = os.getenv(env_var)
        if not var:
            raise ValueError(
                '"{}" is neither specified nor defined as env variables'.
                format(env_var))
        return var

    def _print_log(self, msg, level=logging.INFO, *args, **kwargs):
        # Route through the runner's logger when available, else plain print.
        if self.logger is not None:
            self.logger.log(level, msg, *args, **kwargs)
        else:
            print(msg, *args, **kwargs)

    def connect(self,
                model_name,
                work_dir=None,
                info=dict(),
                timeout=5,
                logger=None):
        """Register this run with the PAVI server.

        On HTTP 200 the server's response body is taken as the instance id and
        a daemon thread is started that posts queued logs in the background.

        Returns:
            bool: True if the connection was established, False otherwise.
        """
        if logger is not None:
            self.logger = logger
        self._print_log('connecting pavi service {}...'.format(self.url))
        post_data = dict(
            time=str(datetime.now()),
            username=self.username,
            password=self.password,
            instance_id=self.instance_id,
            model=model_name,
            work_dir=osp.abspath(work_dir) if work_dir else '',
            session_file=info.get('session_file', ''),
            session_text=info.get('session_text', ''),
            model_text=info.get('model_text', ''),
            device=get_host_info())
        try:
            response = requests.post(self.url, json=post_data, timeout=timeout)
        except Exception as ex:
            self._print_log(
                'fail to connect to pavi service: {}'.format(ex),
                level=logging.ERROR)
        else:
            if response.status_code == 200:
                self.instance_id = response.text
                self._print_log(
                    'pavi service connected, instance_id: {}'.format(
                        self.instance_id))
                # Start the background poster; daemon=True so it never blocks
                # interpreter shutdown.
                self.log_queue = Queue()
                self.log_thread = Thread(target=self.post_worker_fn)
                self.log_thread.daemon = True
                self.log_thread.start()
                return True
            else:
                self._print_log(
                    'fail to connect to pavi service, status code: '
                    '{}, err message: {}'.format(response.status_code,
                                                 response.reason),
                    level=logging.ERROR)
        return False

    def post_worker_fn(self, max_retry=3, queue_timeout=1, req_timeout=3):
        """Daemon loop: drain the queue and POST each log, retrying up to
        `max_retry` times before dropping it."""
        while True:
            try:
                log = self.log_queue.get(timeout=queue_timeout)
            except Empty:
                # Nothing queued yet; back off briefly.
                time.sleep(1)
            except Exception as ex:
                self._print_log(
                    'fail to get logs from queue: {}'.format(ex),
                    level=logging.ERROR)
            else:
                retry = 0
                while retry < max_retry:
                    try:
                        response = requests.post(
                            self.url, json=log, timeout=req_timeout)
                    except Exception as ex:
                        retry += 1
                        self._print_log(
                            'error when posting logs to pavi: {}'.format(ex),
                            level=logging.ERROR)
                    else:
                        status_code = response.status_code
                        if status_code == 200:
                            break
                        else:
                            self._print_log(
                                'unexpected status code: {}, err msg: {}'.
                                format(status_code, response.reason),
                                level=logging.ERROR)
                            retry += 1
                if retry == max_retry:
                    # Give up on this log entry after exhausting retries.
                    self._print_log(
                        'fail to send logs of iteration {}'.format(
                            log['iter_num']),
                        level=logging.ERROR)

    def log(self, phase, iter, outputs):
        """Queue one log payload for the background poster (no-op before connect)."""
        if self.log_queue is not None:
            logs = {
                'time': str(datetime.now()),
                'instance_id': self.instance_id,
                'flow_id': phase,
                'iter_num': iter,
                'outputs': outputs,
                'msg': ''
            }
            self.log_queue.put(logs)
class PaviLoggerHook(LoggerHook):
    """Logger hook that forwards buffered training logs to a PAVI server."""

    def __init__(self,
                 url,
                 username=None,
                 password=None,
                 instance_id=None,
                 config_file=None,
                 interval=10,
                 ignore_last=True,
                 reset_flag=True):
        # PaviClient handles all HTTP communication with the server.
        self.pavi = PaviClient(url, username, password, instance_id)
        self.config_file = config_file
        super(PaviLoggerHook, self).__init__(interval, ignore_last, reset_flag)

    def before_run(self, runner):
        super(PaviLoggerHook, self).before_run(runner)
        self.connect(runner)

    @master_only
    def connect(self, runner, timeout=5):
        """Connect to the PAVI service, uploading the config file if given."""
        cfg_info = dict()
        if self.config_file is not None:
            with open(self.config_file, 'r') as f:
                config_text = f.read()
            cfg_info.update(
                session_file=self.config_file, session_text=config_text)
        return self.pavi.connect(runner.model_name, runner.work_dir, cfg_info,
                                 timeout, runner.logger)

    @master_only
    def log(self, runner):
        """Send the buffered numeric log outputs for the current iteration."""
        log_outs = runner.log_buffer.output.copy()
        log_outs.pop('time', None)
        log_outs.pop('data_time', None)
        # BUG FIX: the original popped keys while iterating `log_outs.items()`,
        # which raises "dictionary changed size during iteration" in Python 3.
        # Materialise the keys to drop before mutating the dict.
        str_keys = [k for k, v in log_outs.items() if isinstance(v, str)]
        for k in str_keys:
            log_outs.pop(k)
        self.pavi.log(runner.mode, runner.iter + 1, log_outs)
|
Cream/CDARTS/CDARTS_detection/mmcv/runner/hooks/logger/pavi.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/runner/hooks/logger/pavi.py",
"repo_id": "Cream",
"token_count": 3491
}
| 260 |
import sys
from multiprocessing import Pool
from .misc import collections_abc
from .timer import Timer
class ProgressBar(object):
    """A progress bar which can print the progress"""

    def __init__(self, task_num=0, bar_width=50, start=True):
        self.task_num = task_num
        # Never let the bar exceed what the terminal can display.
        self.bar_width = min(bar_width, self._get_max_bar_width())
        self.completed = 0
        if start:
            self.start()

    def _get_max_bar_width(self):
        # shutil.get_terminal_size is stdlib from Python 3.3; use the
        # backport on older interpreters.
        if sys.version_info > (3, 3):
            from shutil import get_terminal_size
        else:
            from backports.shutil_get_terminal_size import get_terminal_size
        terminal_width, _ = get_terminal_size()
        max_bar_width = min(int(terminal_width * 0.6), terminal_width - 50)
        if max_bar_width >= 10:
            return max_bar_width
        print('terminal width is too small ({}), please consider '
              'widen the terminal for better progressbar '
              'visualization'.format(terminal_width))
        return 10

    def start(self):
        """Print the initial (empty) bar and start the timer."""
        if self.task_num > 0:
            sys.stdout.write('[{}] 0/{}, elapsed: 0s, ETA:'.format(
                ' ' * self.bar_width, self.task_num))
        else:
            sys.stdout.write('completed: 0, elapsed: 0s')
        sys.stdout.flush()
        self.timer = Timer()

    def update(self):
        """Mark one task as done and redraw the bar in place."""
        self.completed += 1
        elapsed = self.timer.since_start()
        fps = self.completed / elapsed
        if self.task_num <= 0:
            # Unknown total: just report throughput.
            sys.stdout.write(
                'completed: {}, elapsed: {}s, {:.1f} tasks/s'.format(
                    self.completed, int(elapsed + 0.5), fps))
            sys.stdout.flush()
            return
        percentage = self.completed / float(self.task_num)
        eta = int(elapsed * (1 - percentage) / percentage + 0.5)
        mark_width = int(self.bar_width * percentage)
        bar_chars = '>' * mark_width + ' ' * (self.bar_width - mark_width)
        sys.stdout.write(
            '\r[{}] {}/{}, {:.1f} task/s, elapsed: {}s, ETA: {:5}s'.format(
                bar_chars, self.completed, self.task_num, fps,
                int(elapsed + 0.5), eta))
        sys.stdout.flush()
def track_progress(func, tasks, bar_width=50, **kwargs):
    """Track the progress of tasks execution with a progress bar.

    Tasks are done with a simple for-loop.

    Args:
        func (callable): The function to be applied to each task.
        tasks (list or tuple[Iterable, int]): A list of tasks or
            (tasks, total num).
        bar_width (int): Width of progress bar.

    Returns:
        list: The task results.
    """
    if isinstance(tasks, tuple):
        # (iterable, total) pairs are allowed for generators of known length.
        assert len(tasks) == 2
        assert isinstance(tasks[0], collections_abc.Iterable)
        assert isinstance(tasks[1], int)
        tasks, task_num = tasks
    elif isinstance(tasks, collections_abc.Iterable):
        task_num = len(tasks)
    else:
        raise TypeError(
            '"tasks" must be an iterable object or a (iterator, int) tuple')
    prog_bar = ProgressBar(task_num, bar_width)
    results = []
    for task in tasks:
        results.append(func(task, **kwargs))
        prog_bar.update()
    sys.stdout.write('\n')
    return results
def init_pool(process_num, initializer=None, initargs=None):
    """Create a multiprocessing.Pool, validating the optional initializer args."""
    if initializer is None:
        return Pool(process_num)
    if initargs is None:
        return Pool(process_num, initializer)
    if not isinstance(initargs, tuple):
        raise TypeError('"initargs" must be a tuple')
    return Pool(process_num, initializer, initargs)
def track_parallel_progress(func,
                            tasks,
                            nproc,
                            initializer=None,
                            initargs=None,
                            bar_width=50,
                            chunksize=1,
                            skip_first=False,
                            keep_order=True):
    """Track the progress of parallel task execution with a progress bar.
    The built-in :mod:`multiprocessing` module is used for process pools and
    tasks are done with :func:`Pool.map` or :func:`Pool.imap_unordered`.
    Args:
        func (callable): The function to be applied to each task.
        tasks (list or tuple[Iterable, int]): A list of tasks or
            (tasks, total num).
        nproc (int): Process (worker) number.
        initializer (None or callable): Refer to :class:`multiprocessing.Pool`
            for details.
        initargs (None or tuple): Refer to :class:`multiprocessing.Pool` for
            details.
        chunksize (int): Refer to :class:`multiprocessing.Pool` for details.
        bar_width (int): Width of progress bar.
        skip_first (bool): Whether to skip the first sample for each worker
            when estimating fps, since the initialization step may takes
            longer.
        keep_order (bool): If True, :func:`Pool.imap` is used, otherwise
            :func:`Pool.imap_unordered` is used.
    Returns:
        list: The task results.
    """
    if isinstance(tasks, tuple):
        # (iterable, total) pairs are allowed for generators of known length.
        assert len(tasks) == 2
        assert isinstance(tasks[0], collections_abc.Iterable)
        assert isinstance(tasks[1], int)
        task_num = tasks[1]
        tasks = tasks[0]
    elif isinstance(tasks, collections_abc.Iterable):
        task_num = len(tasks)
    else:
        raise TypeError(
            '"tasks" must be an iterable object or a (iterator, int) tuple')
    pool = init_pool(nproc, initializer, initargs)
    # When skipping warm-up results, delay the bar start and exclude the
    # skipped chunk from the displayed total.
    start = not skip_first
    task_num -= nproc * chunksize * int(skip_first)
    prog_bar = ProgressBar(task_num, bar_width, start)
    results = []
    if keep_order:
        gen = pool.imap(func, tasks, chunksize)
    else:
        gen = pool.imap_unordered(func, tasks, chunksize)
    for result in gen:
        results.append(result)
        if skip_first:
            # Ignore warm-up results for fps estimation; start the bar once
            # every worker has finished its first chunk.
            if len(results) < nproc * chunksize:
                continue
            elif len(results) == nproc * chunksize:
                prog_bar.start()
                continue
        prog_bar.update()
    sys.stdout.write('\n')
    pool.close()
    pool.join()
    return results
|
Cream/CDARTS/CDARTS_detection/mmcv/utils/progressbar.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/utils/progressbar.py",
"repo_id": "Cream",
"token_count": 2937
}
| 261 |
Metadata-Version: 2.1
Name: mmdet
Version: 0.6.0+889383
Summary: Open MMLab Detection Toolbox
Home-page: https://github.com/open-mmlab/mmdetection
License: Apache License 2.0
Keywords: computer vision,object detection
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
# Hit-Detector Code Base
Implementation of our CVPR2020 paper [Hit-Detector: Hierarchical Trinity Architecture Search for Object Detection](https://arxiv.org/pdf/2003.11818.pdf)
We released the searched Hit-Detector Architecture.
### Environments
- Python 3.6
- Pytorch>=1.1.0
- Torchvision == 0.3.0
You can directly run the code ```sh env.sh``` to setup the running environment.
We use 8 GPUs (32GB V100) to train our detector, you can adjust the batch size in configs by yourselves.
### Data Preparation
Your directory tree should look like this:
````bash
$HitDet.pytorch/data
├── coco
│ ├── annotations
│ ├── train2017
│ └── val2017
│
├── VOCdevkit
│ ├── VOC2007
│ │ ├── Annotations
│ │ ├── ImageSets
│ │ ├── JPEGImages
│ │ ├── SegmentationClass
│ │ └── SegmentationObject
│ └── VOC2012
│ ├── Annotations
│ ├── ImageSets
│ ├── JPEGImages
│ ├── SegmentationClass
│ └── SegmentationObject
````
### Getting Started
Our pretrained backbone params can be found in [BaiduCloud](https://pan.baidu.com/s/1mH4-qowzqlydhQ5VIaK--g). pwd: jbsm or [GoogleDrive](https://drive.google.com/open?id=1nFtzqsroOpMEpjc8Go1GKvope55UaxrC)
Train the searched model:
```
cd scripts
sh train_hit_det.sh
```
### Results on COCO minival
| Model | Params | mAP |
| :---- | :----: | :----:|
| FPN | 41.8M | 36.6 |
| Hit-Det | 27.6M | 41.3 |
## Citation
```
@InProceedings{guo2020hit,
author = {Guo, Jianyuan and Han, Kai and Wang, Yunhe and Zhang, Chao and Yang, Zhaohui and Wu, Han and Chen, Xinghao and Xu, Chang},
title = {Hit-Detector: Hierarchical Trinity Architecture Search for Object Detection},
booktitle = {arXiv preprint arXiv:2003.11818},
year = {2020}
}
```
## Acknowledgement
Our code is based on the open source project [MMDetection](https://github.com/open-mmlab/mmdetection).
|
Cream/CDARTS/CDARTS_detection/mmdet.egg-info/PKG-INFO/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet.egg-info/PKG-INFO",
"repo_id": "Cream",
"token_count": 863
}
| 262 |
from .geometry import bbox_overlaps
from .assigners import BaseAssigner, MaxIoUAssigner, AssignResult
from .samplers import (BaseSampler, PseudoSampler, RandomSampler,
InstanceBalancedPosSampler, IoUBalancedNegSampler,
CombinedSampler, SamplingResult)
from .assign_sampling import build_assigner, build_sampler, assign_and_sample
from .transforms import (bbox2delta, delta2bbox, bbox_flip, bbox_mapping,
bbox_mapping_back, bbox2roi, roi2bbox, bbox2result,
distance2bbox)
from .bbox_target import bbox_target
# Explicit public API of the bbox subpackage, re-exporting the names
# imported above.
__all__ = [
    'bbox_overlaps', 'BaseAssigner', 'MaxIoUAssigner', 'AssignResult',
    'BaseSampler', 'PseudoSampler', 'RandomSampler',
    'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler',
    'SamplingResult', 'build_assigner', 'build_sampler', 'assign_and_sample',
    'bbox2delta', 'delta2bbox', 'bbox_flip', 'bbox_mapping',
    'bbox_mapping_back', 'bbox2roi', 'roi2bbox', 'bbox2result',
    'distance2bbox', 'bbox_target'
]
|
Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/__init__.py",
"repo_id": "Cream",
"token_count": 474
}
| 263 |
import numpy as np
import torch
from .base_sampler import BaseSampler
class RandomSampler(BaseSampler):
    """Sampler that selects positive/negative proposals uniformly at random."""

    def __init__(self,
                 num,
                 pos_fraction,
                 neg_pos_ub=-1,
                 add_gt_as_proposals=True,
                 **kwargs):
        # Extra kwargs are accepted (and ignored) for config compatibility.
        super(RandomSampler, self).__init__(num, pos_fraction, neg_pos_ub,
                                            add_gt_as_proposals)

    @staticmethod
    def random_choice(gallery, num):
        """Random select some elements from the gallery.
        It seems that Pytorch's implementation is slower than numpy so we use
        numpy to randperm the indices.
        """
        assert len(gallery) >= num
        if isinstance(gallery, list):
            gallery = np.array(gallery)
        # Shuffle all candidate indices and take the first `num`.
        cands = np.arange(len(gallery))
        np.random.shuffle(cands)
        rand_inds = cands[:num]
        if not isinstance(gallery, np.ndarray):
            # gallery is a torch tensor: move the indices onto its device.
            rand_inds = torch.from_numpy(rand_inds).long().to(gallery.device)
        return gallery[rand_inds]

    def _sample_pos(self, assign_result, num_expected, **kwargs):
        """Randomly sample some positive samples."""
        # gt_inds > 0 marks proposals assigned to a ground-truth box.
        pos_inds = torch.nonzero(assign_result.gt_inds > 0)
        if pos_inds.numel() != 0:
            pos_inds = pos_inds.squeeze(1)
        if pos_inds.numel() <= num_expected:
            # Fewer positives than requested: keep them all.
            return pos_inds
        else:
            return self.random_choice(pos_inds, num_expected)

    def _sample_neg(self, assign_result, num_expected, **kwargs):
        """Randomly sample some negative samples."""
        # gt_inds == 0 marks proposals assigned to background.
        neg_inds = torch.nonzero(assign_result.gt_inds == 0)
        if neg_inds.numel() != 0:
            neg_inds = neg_inds.squeeze(1)
        if len(neg_inds) <= num_expected:
            return neg_inds
        else:
            return self.random_choice(neg_inds, num_expected)
|
Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/samplers/random_sampler.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/samplers/random_sampler.py",
"repo_id": "Cream",
"token_count": 904
}
| 264 |
import mmcv
def split_combined_polys(polys, poly_lens, polys_per_mask):
    """Split the combined 1-D polys into masks.

    A mask is represented as a list of polys, and a poly is represented as
    a 1-D array. In dataset, all masks are concatenated into a single 1-D
    tensor. Here we need to split the tensor into original representations.

    Args:
        polys (list): a list (length = image num) of 1-D tensors
        poly_lens (list): a list (length = image num) of poly length
        polys_per_mask (list): a list (length = image num) of poly number
            of each mask

    Returns:
        list: a list (length = image num) of list (length = mask num) of
            list (length = poly num) of numpy array
    """
    mask_polys_list = []
    for img_id in range(len(polys)):
        # First split the flat tensor into individual polygons, then group
        # those polygons per mask.
        split_polys = mmcv.slice_list(polys[img_id],
                                      poly_lens[img_id].tolist())
        mask_polys_list.append(
            mmcv.slice_list(split_polys, polys_per_mask[img_id].tolist()))
    return mask_polys_list
|
Cream/CDARTS/CDARTS_detection/mmdet/core/mask/utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/core/mask/utils.py",
"repo_id": "Cream",
"token_count": 485
}
| 265 |
from .compose import Compose
from .formating import (Collect, ImageToTensor, ToDataContainer, ToTensor,
Transpose, to_tensor)
from .loading import LoadAnnotations, LoadImageFromFile, LoadProposals
from .test_aug import MultiScaleFlipAug
from .transforms import (Albu, Expand, MinIoURandomCrop, Normalize, Pad,
PhotoMetricDistortion, RandomCrop, RandomFlip, Resize,
SegResizeFlipPadRescale)
__all__ = [
'Compose', 'to_tensor', 'ToTensor', 'ImageToTensor', 'ToDataContainer',
'Transpose', 'Collect', 'LoadAnnotations', 'LoadImageFromFile',
'LoadProposals', 'MultiScaleFlipAug', 'Resize', 'RandomFlip', 'Pad',
'RandomCrop', 'Normalize', 'SegResizeFlipPadRescale', 'MinIoURandomCrop',
'Expand', 'PhotoMetricDistortion', 'Albu'
]
|
Cream/CDARTS/CDARTS_detection/mmdet/datasets/pipelines/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/datasets/pipelines/__init__.py",
"repo_id": "Cream",
"token_count": 336
}
| 266 |
import torch.nn as nn
from mmcv.cnn import normal_init
from .guided_anchor_head import GuidedAnchorHead, FeatureAdaption
from ..registry import HEADS
from ..utils import bias_init_with_prob, ConvModule
from mmdet.ops import MaskedConv2d
@HEADS.register_module
class GARetinaHead(GuidedAnchorHead):
    """Guided-Anchor-based RetinaNet head.

    Extends GuidedAnchorHead with stacked conv towers for classification and
    regression, anchor location/shape prediction branches, feature adaption,
    and masked final convs that only compute at locations kept by the
    predicted anchor-location mask at test time.
    """
    def __init__(self,
                 num_classes,
                 in_channels,
                 stacked_convs=4,
                 conv_cfg=None,
                 norm_cfg=None,
                 **kwargs):
        # Stored before calling the parent constructor; presumably the parent
        # __init__ triggers _init_layers(), which reads them — TODO confirm.
        self.stacked_convs = stacked_convs
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        super(GARetinaHead, self).__init__(num_classes, in_channels, **kwargs)
    def _init_layers(self):
        """Build conv towers, guided-anchor branches and masked output convs."""
        self.relu = nn.ReLU(inplace=True)
        self.cls_convs = nn.ModuleList()
        self.reg_convs = nn.ModuleList()
        for i in range(self.stacked_convs):
            # First conv adapts in_channels; the rest keep feat_channels.
            chn = self.in_channels if i == 0 else self.feat_channels
            self.cls_convs.append(
                ConvModule(chn,
                           self.feat_channels,
                           3,
                           stride=1,
                           padding=1,
                           conv_cfg=self.conv_cfg,
                           norm_cfg=self.norm_cfg))
            self.reg_convs.append(
                ConvModule(chn,
                           self.feat_channels,
                           3,
                           stride=1,
                           padding=1,
                           conv_cfg=self.conv_cfg,
                           norm_cfg=self.norm_cfg))
        # Anchor location (1 channel) and anchor shape (2 per anchor) heads.
        self.conv_loc = nn.Conv2d(self.feat_channels, 1, 1)
        self.conv_shape = nn.Conv2d(self.feat_channels, self.num_anchors * 2,
                                    1)
        # Deformable feature adaption driven by the predicted anchor shapes.
        self.feature_adaption_cls = FeatureAdaption(
            self.feat_channels,
            self.feat_channels,
            kernel_size=3,
            deformable_groups=self.deformable_groups)
        self.feature_adaption_reg = FeatureAdaption(
            self.feat_channels,
            self.feat_channels,
            kernel_size=3,
            deformable_groups=self.deformable_groups)
        # Masked convs skip computation at filtered-out locations (see
        # forward_single, where a mask is built from the location scores).
        self.retina_cls = MaskedConv2d(self.feat_channels,
                                       self.num_anchors *
                                       self.cls_out_channels,
                                       3,
                                       padding=1)
        self.retina_reg = MaskedConv2d(self.feat_channels,
                                       self.num_anchors * 4,
                                       3,
                                       padding=1)
    def init_weights(self):
        """Initialize conv weights; cls/loc heads get a prior-prob bias."""
        for m in self.cls_convs:
            normal_init(m.conv, std=0.01)
        for m in self.reg_convs:
            normal_init(m.conv, std=0.01)
        self.feature_adaption_cls.init_weights()
        self.feature_adaption_reg.init_weights()
        # Bias set so initial foreground probability is ~0.01 (focal-loss
        # style initialization).
        bias_cls = bias_init_with_prob(0.01)
        normal_init(self.conv_loc, std=0.01, bias=bias_cls)
        normal_init(self.conv_shape, std=0.01)
        normal_init(self.retina_cls, std=0.01, bias=bias_cls)
        normal_init(self.retina_reg, std=0.01)
    def forward_single(self, x):
        """Forward one feature level.

        Returns:
            tuple: (cls_score, bbox_pred, shape_pred, loc_pred).
        """
        cls_feat = x
        reg_feat = x
        for cls_conv in self.cls_convs:
            cls_feat = cls_conv(cls_feat)
        for reg_conv in self.reg_convs:
            reg_feat = reg_conv(reg_feat)
        loc_pred = self.conv_loc(cls_feat)
        shape_pred = self.conv_shape(reg_feat)
        cls_feat = self.feature_adaption_cls(cls_feat, shape_pred)
        reg_feat = self.feature_adaption_reg(reg_feat, shape_pred)
        # At test time, only run the final convs where the anchor-location
        # score passes the threshold; during training compute everywhere.
        if not self.training:
            mask = loc_pred.sigmoid()[0] >= self.loc_filter_thr
        else:
            mask = None
        cls_score = self.retina_cls(cls_feat, mask)
        bbox_pred = self.retina_reg(reg_feat, mask)
        return cls_score, bbox_pred, shape_pred, loc_pred
|
Cream/CDARTS/CDARTS_detection/mmdet/models/anchor_heads/ga_retina_head.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/anchor_heads/ga_retina_head.py",
"repo_id": "Cream",
"token_count": 2333
}
| 267 |
import logging
import torch.nn as nn
from mmcv.cnn import constant_init, kaiming_init
from mmcv.runner import load_checkpoint
from torch.nn.modules.batchnorm import _BatchNorm
from ..registry import BACKBONES
from ..utils import build_norm_layer, build_conv_layer
from .resnet import BasicBlock, Bottleneck
class HRModule(nn.Module):
    """ High-Resolution Module for HRNet. In this module, every branch
    has 4 BasicBlocks/Bottlenecks. Fusion/Exchange is in this module.

    Each branch processes one resolution; fuse layers then exchange
    information between every pair of branches (upsampling or strided
    downsampling as needed).
    """
    def __init__(self,
                 num_branches,
                 blocks,
                 num_blocks,
                 in_channels,
                 num_channels,
                 multiscale_output=True,
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN')):
        super(HRModule, self).__init__()
        self._check_branches(num_branches, num_blocks, in_channels,
                             num_channels)
        self.in_channels = in_channels
        self.num_branches = num_branches
        self.multiscale_output = multiscale_output
        self.norm_cfg = norm_cfg
        self.conv_cfg = conv_cfg
        self.with_cp = with_cp
        self.branches = self._make_branches(num_branches, blocks, num_blocks,
                                            num_channels)
        self.fuse_layers = self._make_fuse_layers()
        self.relu = nn.ReLU(inplace=False)
    def _check_branches(self, num_branches, num_blocks, in_channels,
                        num_channels):
        """Validate that all per-branch config lists match num_branches."""
        if num_branches != len(num_blocks):
            error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(
                num_branches, len(num_blocks))
            raise ValueError(error_msg)
        if num_branches != len(num_channels):
            error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(
                num_branches, len(num_channels))
            raise ValueError(error_msg)
        if num_branches != len(in_channels):
            error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(
                num_branches, len(in_channels))
            raise ValueError(error_msg)
    def _make_one_branch(self,
                         branch_index,
                         block,
                         num_blocks,
                         num_channels,
                         stride=1):
        """Build the residual-block stack for a single resolution branch."""
        downsample = None
        # A projection shortcut is needed when stride or channel count
        # changes across the first block.
        if stride != 1 or \
                self.in_channels[branch_index] != \
                num_channels[branch_index] * block.expansion:
            downsample = nn.Sequential(
                build_conv_layer(
                    self.conv_cfg,
                    self.in_channels[branch_index],
                    num_channels[branch_index] * block.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=False),
                build_norm_layer(self.norm_cfg, num_channels[branch_index] *
                                 block.expansion)[1])
        layers = []
        layers.append(
            block(
                self.in_channels[branch_index],
                num_channels[branch_index],
                stride,
                downsample=downsample,
                with_cp=self.with_cp,
                norm_cfg=self.norm_cfg,
                conv_cfg=self.conv_cfg))
        # NOTE: updates self.in_channels in place so subsequent blocks (and
        # the fuse layers built later) see the expanded channel count.
        self.in_channels[branch_index] = \
            num_channels[branch_index] * block.expansion
        for i in range(1, num_blocks[branch_index]):
            layers.append(
                block(
                    self.in_channels[branch_index],
                    num_channels[branch_index],
                    with_cp=self.with_cp,
                    norm_cfg=self.norm_cfg,
                    conv_cfg=self.conv_cfg))
        return nn.Sequential(*layers)
    def _make_branches(self, num_branches, block, num_blocks, num_channels):
        """Build one residual branch per resolution."""
        branches = []
        for i in range(num_branches):
            branches.append(
                self._make_one_branch(i, block, num_blocks, num_channels))
        return nn.ModuleList(branches)
    def _make_fuse_layers(self):
        """Build the cross-resolution fusion layers.

        fuse_layers[i][j] transforms branch j's output to branch i's
        resolution/channels: 1x1 conv + upsample when j is lower-resolution
        (j > i), identity (None) when j == i, and a chain of stride-2 3x3
        convs when j is higher-resolution (j < i).
        """
        if self.num_branches == 1:
            return None
        num_branches = self.num_branches
        in_channels = self.in_channels
        fuse_layers = []
        # Only one output branch is fused when multiscale_output is False.
        num_out_branches = num_branches if self.multiscale_output else 1
        for i in range(num_out_branches):
            fuse_layer = []
            for j in range(num_branches):
                if j > i:
                    fuse_layer.append(
                        nn.Sequential(
                            build_conv_layer(
                                self.conv_cfg,
                                in_channels[j],
                                in_channels[i],
                                kernel_size=1,
                                stride=1,
                                padding=0,
                                bias=False),
                            build_norm_layer(self.norm_cfg, in_channels[i])[1],
                            nn.Upsample(
                                scale_factor=2**(j - i), mode='nearest')))
                elif j == i:
                    fuse_layer.append(None)
                else:
                    conv_downsamples = []
                    for k in range(i - j):
                        # Last downsample maps to target channels without
                        # ReLU; intermediate ones keep channels with ReLU.
                        if k == i - j - 1:
                            conv_downsamples.append(
                                nn.Sequential(
                                    build_conv_layer(
                                        self.conv_cfg,
                                        in_channels[j],
                                        in_channels[i],
                                        kernel_size=3,
                                        stride=2,
                                        padding=1,
                                        bias=False),
                                    build_norm_layer(self.norm_cfg,
                                                     in_channels[i])[1]))
                        else:
                            conv_downsamples.append(
                                nn.Sequential(
                                    build_conv_layer(
                                        self.conv_cfg,
                                        in_channels[j],
                                        in_channels[j],
                                        kernel_size=3,
                                        stride=2,
                                        padding=1,
                                        bias=False),
                                    build_norm_layer(self.norm_cfg,
                                                     in_channels[j])[1],
                                    nn.ReLU(inplace=False)))
                    fuse_layer.append(nn.Sequential(*conv_downsamples))
            fuse_layers.append(nn.ModuleList(fuse_layer))
        return nn.ModuleList(fuse_layers)
    def forward(self, x):
        """Run each branch, then fuse across resolutions.

        Args:
            x (list[Tensor]): one feature map per branch.

        Returns:
            list[Tensor]: fused feature maps (one per output branch).
        """
        if self.num_branches == 1:
            return [self.branches[0](x[0])]
        for i in range(self.num_branches):
            x[i] = self.branches[i](x[i])
        x_fuse = []
        for i in range(len(self.fuse_layers)):
            y = 0
            for j in range(self.num_branches):
                if i == j:
                    y += x[j]
                else:
                    y += self.fuse_layers[i][j](x[j])
            x_fuse.append(self.relu(y))
        return x_fuse
@BACKBONES.register_module
class HRNet(nn.Module):
    """HRNet backbone.
    High-Resolution Representations for Labeling Pixels and Regions
    arXiv: https://arxiv.org/abs/1904.04514
    Args:
        extra (dict): detailed configuration for each stage of HRNet.
        conv_cfg (dict): dictionary to construct and config conv layer.
        norm_cfg (dict): dictionary to construct and config norm layer.
        norm_eval (bool): Whether to set norm layers to eval mode, namely,
            freeze running stats (mean and var). Note: Effect on Batch Norm
            and its variants only.
        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
            memory while slowing down the training speed.
        zero_init_residual (bool): whether to use zero init for last norm layer
            in resblocks to let them behave as identity.
    """
    # Maps the config 'block' string to the residual block class.
    blocks_dict = {'BASIC': BasicBlock, 'BOTTLENECK': Bottleneck}
    def __init__(self,
                 extra,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 norm_eval=True,
                 with_cp=False,
                 zero_init_residual=False):
        super(HRNet, self).__init__()
        self.extra = extra
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.norm_eval = norm_eval
        self.with_cp = with_cp
        self.zero_init_residual = zero_init_residual
        # stem net: two stride-2 3x3 convs give a 4x downsampled stem.
        self.norm1_name, norm1 = build_norm_layer(self.norm_cfg, 64, postfix=1)
        self.norm2_name, norm2 = build_norm_layer(self.norm_cfg, 64, postfix=2)
        self.conv1 = build_conv_layer(
            self.conv_cfg,
            3,
            64,
            kernel_size=3,
            stride=2,
            padding=1,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        self.conv2 = build_conv_layer(
            self.conv_cfg,
            64,
            64,
            kernel_size=3,
            stride=2,
            padding=1,
            bias=False)
        self.add_module(self.norm2_name, norm2)
        self.relu = nn.ReLU(inplace=True)
        # stage 1: a single-branch residual layer.
        self.stage1_cfg = self.extra['stage1']
        num_channels = self.stage1_cfg['num_channels'][0]
        block_type = self.stage1_cfg['block']
        num_blocks = self.stage1_cfg['num_blocks'][0]
        block = self.blocks_dict[block_type]
        stage1_out_channels = num_channels * block.expansion
        self.layer1 = self._make_layer(block, 64, num_channels, num_blocks)
        # stage 2: transition to two branches, then HRModules.
        self.stage2_cfg = self.extra['stage2']
        num_channels = self.stage2_cfg['num_channels']
        block_type = self.stage2_cfg['block']
        block = self.blocks_dict[block_type]
        num_channels = [channel * block.expansion for channel in num_channels]
        self.transition1 = self._make_transition_layer([stage1_out_channels],
                                                       num_channels)
        self.stage2, pre_stage_channels = self._make_stage(
            self.stage2_cfg, num_channels)
        # stage 3
        self.stage3_cfg = self.extra['stage3']
        num_channels = self.stage3_cfg['num_channels']
        block_type = self.stage3_cfg['block']
        block = self.blocks_dict[block_type]
        num_channels = [channel * block.expansion for channel in num_channels]
        self.transition2 = self._make_transition_layer(pre_stage_channels,
                                                       num_channels)
        self.stage3, pre_stage_channels = self._make_stage(
            self.stage3_cfg, num_channels)
        # stage 4
        self.stage4_cfg = self.extra['stage4']
        num_channels = self.stage4_cfg['num_channels']
        block_type = self.stage4_cfg['block']
        block = self.blocks_dict[block_type]
        num_channels = [channel * block.expansion for channel in num_channels]
        self.transition3 = self._make_transition_layer(pre_stage_channels,
                                                       num_channels)
        self.stage4, pre_stage_channels = self._make_stage(
            self.stage4_cfg, num_channels)
    @property
    def norm1(self):
        # Resolve by name so the norm layer registered via add_module is used.
        return getattr(self, self.norm1_name)
    @property
    def norm2(self):
        return getattr(self, self.norm2_name)
    def _make_transition_layer(self, num_channels_pre_layer,
                               num_channels_cur_layer):
        """Build per-branch transitions between consecutive stages.

        Existing branches get a channel-matching 3x3 conv (or None when
        channels already match); each new branch is created by stride-2
        downsampling from the previous stage's last (lowest-res) branch.
        """
        num_branches_cur = len(num_channels_cur_layer)
        num_branches_pre = len(num_channels_pre_layer)
        transition_layers = []
        for i in range(num_branches_cur):
            if i < num_branches_pre:
                if num_channels_cur_layer[i] != num_channels_pre_layer[i]:
                    transition_layers.append(
                        nn.Sequential(
                            build_conv_layer(
                                self.conv_cfg,
                                num_channels_pre_layer[i],
                                num_channels_cur_layer[i],
                                kernel_size=3,
                                stride=1,
                                padding=1,
                                bias=False),
                            build_norm_layer(self.norm_cfg,
                                             num_channels_cur_layer[i])[1],
                            nn.ReLU(inplace=True)))
                else:
                    transition_layers.append(None)
            else:
                conv_downsamples = []
                for j in range(i + 1 - num_branches_pre):
                    in_channels = num_channels_pre_layer[-1]
                    # Only the final downsample in the chain changes channels.
                    out_channels = num_channels_cur_layer[i] \
                        if j == i - num_branches_pre else in_channels
                    conv_downsamples.append(
                        nn.Sequential(
                            build_conv_layer(
                                self.conv_cfg,
                                in_channels,
                                out_channels,
                                kernel_size=3,
                                stride=2,
                                padding=1,
                                bias=False),
                            build_norm_layer(self.norm_cfg, out_channels)[1],
                            nn.ReLU(inplace=True)))
                transition_layers.append(nn.Sequential(*conv_downsamples))
        return nn.ModuleList(transition_layers)
    def _make_layer(self, block, inplanes, planes, blocks, stride=1):
        """Build a standard ResNet-style residual layer (used for stage 1)."""
        downsample = None
        if stride != 1 or inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                build_conv_layer(
                    self.conv_cfg,
                    inplanes,
                    planes * block.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=False),
                build_norm_layer(self.norm_cfg, planes * block.expansion)[1])
        layers = []
        layers.append(
            block(
                inplanes,
                planes,
                stride,
                downsample=downsample,
                with_cp=self.with_cp,
                norm_cfg=self.norm_cfg,
                conv_cfg=self.conv_cfg))
        inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(
                block(
                    inplanes,
                    planes,
                    with_cp=self.with_cp,
                    norm_cfg=self.norm_cfg,
                    conv_cfg=self.conv_cfg))
        return nn.Sequential(*layers)
    def _make_stage(self, layer_config, in_channels, multiscale_output=True):
        """Build a stage as a sequence of HRModules.

        Returns:
            tuple: (nn.Sequential of HRModules, output channels per branch).
        """
        num_modules = layer_config['num_modules']
        num_branches = layer_config['num_branches']
        num_blocks = layer_config['num_blocks']
        num_channels = layer_config['num_channels']
        block = self.blocks_dict[layer_config['block']]
        hr_modules = []
        for i in range(num_modules):
            # multi_scale_output is only used for the last module
            if not multiscale_output and i == num_modules - 1:
                reset_multiscale_output = False
            else:
                reset_multiscale_output = True
            hr_modules.append(
                HRModule(
                    num_branches,
                    block,
                    num_blocks,
                    in_channels,
                    num_channels,
                    reset_multiscale_output,
                    with_cp=self.with_cp,
                    norm_cfg=self.norm_cfg,
                    conv_cfg=self.conv_cfg))
        return nn.Sequential(*hr_modules), in_channels
    def init_weights(self, pretrained=None):
        """Load pretrained weights from a path, or initialize from scratch."""
        if isinstance(pretrained, str):
            logger = logging.getLogger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
                    constant_init(m, 1)
            if self.zero_init_residual:
                # Zero the last norm so residual blocks start as identity.
                for m in self.modules():
                    if isinstance(m, Bottleneck):
                        constant_init(m.norm3, 0)
                    elif isinstance(m, BasicBlock):
                        constant_init(m.norm2, 0)
        else:
            raise TypeError('pretrained must be a str or None')
    def forward(self, x):
        """Forward pass returning multi-resolution feature maps (stage 4)."""
        x = self.conv1(x)
        x = self.norm1(x)
        x = self.relu(x)
        x = self.conv2(x)
        x = self.norm2(x)
        x = self.relu(x)
        x = self.layer1(x)
        x_list = []
        for i in range(self.stage2_cfg['num_branches']):
            if self.transition1[i] is not None:
                x_list.append(self.transition1[i](x))
            else:
                x_list.append(x)
        y_list = self.stage2(x_list)
        x_list = []
        # NOTE(review): non-None transitions are fed the LAST (lowest-res)
        # output y_list[-1]; branches with a None transition pass y_list[i]
        # through unchanged. This mirrors the upstream HRNet implementation.
        for i in range(self.stage3_cfg['num_branches']):
            if self.transition2[i] is not None:
                x_list.append(self.transition2[i](y_list[-1]))
            else:
                x_list.append(y_list[i])
        y_list = self.stage3(x_list)
        x_list = []
        for i in range(self.stage4_cfg['num_branches']):
            if self.transition3[i] is not None:
                x_list.append(self.transition3[i](y_list[-1]))
            else:
                x_list.append(y_list[i])
        y_list = self.stage4(x_list)
        return y_list
    def train(self, mode=True):
        """Switch train/eval mode; optionally keep BatchNorm layers frozen."""
        super(HRNet, self).train(mode)
        if mode and self.norm_eval:
            for m in self.modules():
                # trick: eval have effect on BatchNorm only
                if isinstance(m, _BatchNorm):
                    m.eval()
|
Cream/CDARTS/CDARTS_detection/mmdet/models/backbones/hrnet.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/backbones/hrnet.py",
"repo_id": "Cream",
"token_count": 10666
}
| 268 |
from torch import nn
from mmdet.utils import build_from_cfg
from .registry import (BACKBONES, NECKS, ROI_EXTRACTORS, SHARED_HEADS, HEADS,
LOSSES, DETECTORS)
def build(cfg, registry, default_args=None):
    """Build a module (or modules) from config.

    A single config dict builds one module via ``build_from_cfg``; a list of
    config dicts builds each entry and wraps them in an ``nn.Sequential``.
    """
    if not isinstance(cfg, list):
        return build_from_cfg(cfg, registry, default_args)
    built = [build_from_cfg(item, registry, default_args) for item in cfg]
    return nn.Sequential(*built)
def build_backbone(cfg):
    """Build a backbone module from config."""
    return build(cfg, BACKBONES)
def build_neck(cfg):
    """Build a neck module from config."""
    return build(cfg, NECKS)
def build_roi_extractor(cfg):
    """Build a RoI extractor module from config."""
    return build(cfg, ROI_EXTRACTORS)
def build_shared_head(cfg):
    """Build a shared head module from config."""
    return build(cfg, SHARED_HEADS)
def build_head(cfg):
    """Build a head module from config."""
    return build(cfg, HEADS)
def build_loss(cfg):
    """Build a loss module from config."""
    return build(cfg, LOSSES)
def build_detector(cfg, train_cfg=None, test_cfg=None):
    """Build a detector, passing train/test configs as default args."""
    return build(cfg, DETECTORS, dict(train_cfg=train_cfg, test_cfg=test_cfg))
|
Cream/CDARTS/CDARTS_detection/mmdet/models/builder.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/builder.py",
"repo_id": "Cream",
"token_count": 406
}
| 269 |
import torch
import torch.nn as nn
from .base import BaseDetector
from .test_mixins import RPNTestMixin, BBoxTestMixin, MaskTestMixin
from .. import builder
from ..registry import DETECTORS
from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler
@DETECTORS.register_module
class TwoStageDetector(BaseDetector, RPNTestMixin, BBoxTestMixin,
                       MaskTestMixin):
    """Base class for two-stage detectors (RPN + per-RoI heads).

    The backbone (optionally followed by a neck) extracts features, the RPN
    proposes regions, and bbox/mask heads refine and classify RoIs. This
    variant additionally threads an optional latency loss through the
    backbone/neck and bbox head (NAS-related) — see forward_train.
    """
    def __init__(self,
                 backbone,
                 neck=None,
                 shared_head=None,
                 rpn_head=None,
                 bbox_roi_extractor=None,
                 bbox_head=None,
                 mask_roi_extractor=None,
                 mask_head=None,
                 train_cfg=None,
                 test_cfg=None,
                 cls_roi_scale_factor=None,
                 reg_roi_scale_factor=None,
                 pretrained=None):
        super(TwoStageDetector, self).__init__()
        self.backbone = builder.build_backbone(backbone)
        self.cls_roi_scale_factor = cls_roi_scale_factor
        self.reg_roi_scale_factor = reg_roi_scale_factor
        if neck is not None:
            self.neck = builder.build_neck(neck)
        if shared_head is not None:
            self.shared_head = builder.build_shared_head(shared_head)
        if rpn_head is not None:
            self.rpn_head = builder.build_head(rpn_head)
        if bbox_head is not None:
            self.bbox_roi_extractor = builder.build_roi_extractor(
                bbox_roi_extractor)
            self.bbox_head = builder.build_head(bbox_head)
        if mask_head is not None:
            # When no dedicated mask RoI extractor is given, reuse the bbox
            # one (and later reuse bbox features directly in forward_train).
            if mask_roi_extractor is not None:
                self.mask_roi_extractor = builder.build_roi_extractor(
                    mask_roi_extractor)
                self.share_roi_extractor = False
            else:
                self.share_roi_extractor = True
                self.mask_roi_extractor = self.bbox_roi_extractor
            self.mask_head = builder.build_head(mask_head)
        self.train_cfg = train_cfg
        self.test_cfg = test_cfg
        self.init_weights(pretrained=pretrained)
    @property
    def with_rpn(self):
        # True when an RPN head was configured.
        return hasattr(self, 'rpn_head') and self.rpn_head is not None
    def init_weights(self, pretrained=None):
        """Initialize weights of all sub-modules (optionally from pretrained)."""
        super(TwoStageDetector, self).init_weights(pretrained)
        self.backbone.init_weights(pretrained=pretrained)
        if self.with_neck:
            if isinstance(self.neck, nn.Sequential):
                for m in self.neck:
                    m.init_weights()
            else:
                self.neck.init_weights()
        if self.with_shared_head:
            self.shared_head.init_weights(pretrained=pretrained)
        if self.with_rpn:
            self.rpn_head.init_weights()
        if self.with_bbox:
            self.bbox_roi_extractor.init_weights()
            self.bbox_head.init_weights()
        if self.with_mask:
            self.mask_head.init_weights()
            if not self.share_roi_extractor:
                self.mask_roi_extractor.init_weights()
    def extract_feat(self, img):
        """Extract backbone (+neck) features.

        NOTE(review): when the neck returns a 2-tuple whose second element is
        None, only the first element (the feature maps) is kept — presumably
        the second slot carries an auxiliary value such as a latency loss;
        confirm against the neck implementations used with this detector.
        """
        x = self.backbone(img)
        if self.with_neck:
            x = self.neck(x)
            if len(x) >= 2:
                if x[1] is not None:
                    x = x
                else:
                    x = x[0]
        return x
    def forward_dummy(self, img):
        """Used for computing network flops.
        See `mmedetection/tools/get_flops.py`
        """
        outs = ()
        # backbone
        x = self.extract_feat(img)
        # rpn
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            outs = outs + (rpn_outs, )
        proposals = torch.randn(1000, 4).cuda()
        # bbox head
        rois = bbox2roi([proposals])
        if self.with_bbox:
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            cls_score, bbox_pred = self.bbox_head(bbox_feats)
            outs = outs + (cls_score, bbox_pred)
        # mask head
        if self.with_mask:
            mask_rois = rois[:100]
            mask_feats = self.mask_roi_extractor(
                x[:self.mask_roi_extractor.num_inputs], mask_rois)
            if self.with_shared_head:
                mask_feats = self.shared_head(mask_feats)
            mask_pred = self.mask_head(mask_feats)
            outs = outs + (mask_pred, )
        return outs
    def forward_train(self,
                      img,
                      img_meta,
                      gt_bboxes,
                      gt_labels,
                      gt_bboxes_ignore=None,
                      gt_masks=None,
                      proposals=None):
        """Training forward pass.

        Returns:
            tuple: (losses dict, latency loss or None). The latency loss is
            accumulated from the feature extractor and the bbox head.
        """
        out = self.extract_feat(img)
        # extract_feat may return either the feature tuple itself (>= 4
        # levels) or a (features, latency_loss) pair — disambiguate by length.
        if len(out) >= 4:
            x = out
            loss_latency = None
        else:
            x = out[0]
            loss_latency = out[1]
        losses = dict()
        # RPN forward and loss
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            # return rpn_outs
            rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
                                          self.train_cfg.rpn)
            rpn_losses = self.rpn_head.loss(
                *rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
            losses.update(rpn_losses)
            proposal_cfg = self.train_cfg.get('rpn_proposal',
                                              self.test_cfg.rpn)
            proposal_inputs = rpn_outs + (img_meta, proposal_cfg)
            proposal_list = self.rpn_head.get_bboxes(*proposal_inputs)
        else:
            proposal_list = proposals
        # assign gts and sample proposals
        if self.with_bbox or self.with_mask:
            bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
            bbox_sampler = build_sampler(
                self.train_cfg.rcnn.sampler, context=self)
            num_imgs = img.size(0)
            if gt_bboxes_ignore is None:
                gt_bboxes_ignore = [None for _ in range(num_imgs)]
            sampling_results = []
            for i in range(num_imgs):
                assign_result = bbox_assigner.assign(
                    proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i],
                    gt_labels[i])
                sampling_result = bbox_sampler.sample(
                    assign_result,
                    proposal_list[i],
                    gt_bboxes[i],
                    gt_labels[i],
                    feats=[lvl_feat[i][None] for lvl_feat in x])
                sampling_results.append(sampling_result)
        # bbox head forward and loss
        if self.with_bbox:
            rois = bbox2roi([res.bboxes for res in sampling_results])
            # TODO: a more flexible way to decide which feature maps to use
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            '''
            bbox_feats_cls = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs],
                rois,
                roi_scale_factor=self.cls_roi_scale_factor)
            bbox_feats_reg = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs],
                rois,
                roi_scale_factor=self.reg_roi_scale_factor)
            '''
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            # The bbox head here returns a third value: its own latency loss.
            cls_score, bbox_pred, loss_latency_head = self.bbox_head(bbox_feats)
            if loss_latency_head is not None:
                if loss_latency is not None:
                    loss_latency = loss_latency + loss_latency_head
                else:
                    loss_latency = loss_latency_head
            # cls_score, bbox_pred = self.bbox_head((bbox_feats_cls, bbox_feats_reg))
            bbox_targets = self.bbox_head.get_target(
                sampling_results, gt_bboxes, gt_labels, self.train_cfg.rcnn)
            loss_bbox = self.bbox_head.loss(cls_score, bbox_pred,
                                            *bbox_targets)
            losses.update(loss_bbox)
        # mask head forward and loss
        if self.with_mask:
            if not self.share_roi_extractor:
                pos_rois = bbox2roi(
                    [res.pos_bboxes for res in sampling_results])
                mask_feats = self.mask_roi_extractor(
                    x[:self.mask_roi_extractor.num_inputs], pos_rois)
                if self.with_shared_head:
                    mask_feats = self.shared_head(mask_feats)
            else:
                # Reuse bbox features: select the rows belonging to positive
                # samples via a boolean index built per sampling result.
                pos_inds = []
                device = bbox_feats.device
                for res in sampling_results:
                    pos_inds.append(
                        torch.ones(
                            res.pos_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                    pos_inds.append(
                        torch.zeros(
                            res.neg_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                pos_inds = torch.cat(pos_inds)
                mask_feats = bbox_feats[pos_inds]
            mask_pred = self.mask_head(mask_feats)
            mask_targets = self.mask_head.get_target(
                sampling_results, gt_masks, self.train_cfg.rcnn)
            pos_labels = torch.cat(
                [res.pos_gt_labels for res in sampling_results])
            loss_mask = self.mask_head.loss(mask_pred, mask_targets,
                                            pos_labels)
            losses.update(loss_mask)
        return losses, loss_latency
    # Noted by Jianyuan, 2019/12/30
    # For two-stage reg cls roi scale test
    '''
    def simple_test_bboxes(self,
                           x,
                           img_meta,
                           proposals,
                           rcnn_test_cfg,
                           rescale=False):
        rois = bbox2roi(proposals)
        bbox_cls_feats = self.bbox_roi_extractor(
            x[:self.bbox_roi_extractor.num_inputs],
            rois,
            roi_scale_factor=self.cls_roi_scale_factor)
        bbox_reg_feats = self.bbox_roi_extractor(
            x[:self.bbox_roi_extractor.num_inputs],
            rois,
            roi_scale_factor=self.reg_roi_scale_factor)
        if self.with_shared_head:
            bbox_cls_feats = self.shared_head(bbox_cls_feats)
            bbox_reg_feats = self.shared_head(bbox_reg_feats)
        cls_score, bbox_pred = self.bbox_head((bbox_cls_feats, bbox_reg_feats))
        img_shape = img_meta[0]['img_shape']
        scale_factor = img_meta[0]['scale_factor']
        det_bboxes, det_labels = self.bbox_head.get_det_bboxes(
            rois,
            cls_score,
            bbox_pred,
            img_shape,
            scale_factor,
            rescale=rescale,
            cfg=rcnn_test_cfg)
        return det_bboxes, det_labels
    '''
    # END
    def simple_test(self, img, img_meta, proposals=None, rescale=False):
        """Test without augmentation."""
        assert self.with_bbox, "Bbox head must be implemented."
        out = self.extract_feat(img)
        # Same disambiguation as forward_train; the latency loss is ignored
        # at test time.
        if len(out) >= 4:
            x = out
        else:
            x = out[0]
        proposal_list = self.simple_test_rpn(
            x, img_meta, self.test_cfg.rpn) if proposals is None else proposals
        det_bboxes, det_labels = self.simple_test_bboxes(
            x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale)
        bbox_results = bbox2result(det_bboxes, det_labels,
                                   self.bbox_head.num_classes)
        if not self.with_mask:
            return bbox_results
        else:
            segm_results = self.simple_test_mask(
                x, img_meta, det_bboxes, det_labels, rescale=rescale)
            return bbox_results, segm_results
    def aug_test(self, imgs, img_metas, rescale=False):
        """Test with augmentations.
        If rescale is False, then returned bboxes and masks will fit the scale
        of imgs[0].
        """
        # recompute feats to save memory
        proposal_list = self.aug_test_rpn(
            self.extract_feats(imgs), img_metas, self.test_cfg.rpn)
        det_bboxes, det_labels = self.aug_test_bboxes(
            self.extract_feats(imgs), img_metas, proposal_list,
            self.test_cfg.rcnn)
        if rescale:
            _det_bboxes = det_bboxes
        else:
            # Map boxes back to the scale of the first augmented image.
            _det_bboxes = det_bboxes.clone()
            _det_bboxes[:, :4] *= img_metas[0][0]['scale_factor']
        bbox_results = bbox2result(_det_bboxes, det_labels,
                                   self.bbox_head.num_classes)
        # det_bboxes always keep the original scale
        if self.with_mask:
            segm_results = self.aug_test_mask(
                self.extract_feats(imgs), img_metas, det_bboxes, det_labels)
            return bbox_results, segm_results
        else:
            return bbox_results
|
Cream/CDARTS/CDARTS_detection/mmdet/models/detectors/two_stage.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/detectors/two_stage.py",
"repo_id": "Cream",
"token_count": 7444
}
| 270 |
import numpy as np
import torch
import torch.nn as nn
from mmcv.cnn import kaiming_init, normal_init
from ..builder import build_loss
from ..registry import HEADS
@HEADS.register_module
class MaskIoUHead(nn.Module):
    """Mask IoU Head.
    This head predicts the IoU of predicted masks and corresponding gt masks.

    Architecture: a small conv tower over the concatenation of mask features
    and the (pooled) mask prediction, followed by fully-connected layers and
    a per-class IoU regressor.
    """
    def __init__(self,
                 num_convs=4,
                 num_fcs=2,
                 roi_feat_size=14,
                 in_channels=256,
                 conv_out_channels=256,
                 fc_out_channels=1024,
                 num_classes=81,
                 loss_iou=dict(type='MSELoss', loss_weight=0.5)):
        super(MaskIoUHead, self).__init__()
        self.in_channels = in_channels
        self.conv_out_channels = conv_out_channels
        self.fc_out_channels = fc_out_channels
        self.num_classes = num_classes
        self.convs = nn.ModuleList()
        for i in range(num_convs):
            if i == 0:
                # concatenation of mask feature and mask prediction
                in_channels = self.in_channels + 1
            else:
                in_channels = self.conv_out_channels
            # Last conv downsamples by 2 before the FC layers.
            stride = 2 if i == num_convs - 1 else 1
            self.convs.append(
                nn.Conv2d(
                    in_channels,
                    self.conv_out_channels,
                    3,
                    stride=stride,
                    padding=1))
        self.fcs = nn.ModuleList()
        for i in range(num_fcs):
            # First FC takes the flattened (downsampled) conv feature map.
            in_channels = self.conv_out_channels * (
                roi_feat_size // 2)**2 if i == 0 else self.fc_out_channels
            self.fcs.append(nn.Linear(in_channels, self.fc_out_channels))
        self.fc_mask_iou = nn.Linear(self.fc_out_channels, self.num_classes)
        self.relu = nn.ReLU()
        self.max_pool = nn.MaxPool2d(2, 2)
        self.loss_iou = build_loss(loss_iou)
    def init_weights(self):
        """Kaiming-init convs/FCs; normal-init the final IoU regressor."""
        for conv in self.convs:
            kaiming_init(conv)
        for fc in self.fcs:
            kaiming_init(
                fc,
                a=1,
                mode='fan_in',
                nonlinearity='leaky_relu',
                distribution='uniform')
        normal_init(self.fc_mask_iou, std=0.01)
    def forward(self, mask_feat, mask_pred):
        """Predict per-class mask IoU from mask features and predictions.

        Args:
            mask_feat (Tensor): RoI mask features.
            mask_pred (Tensor): mask logits; sigmoided and max-pooled before
                being concatenated to the features as one extra channel.

        Returns:
            Tensor: predicted IoU, shape (num_rois, num_classes).
        """
        mask_pred = mask_pred.sigmoid()
        mask_pred_pooled = self.max_pool(mask_pred.unsqueeze(1))
        x = torch.cat((mask_feat, mask_pred_pooled), 1)
        for conv in self.convs:
            x = self.relu(conv(x))
        x = x.view(x.size(0), -1)
        for fc in self.fcs:
            x = self.relu(fc(x))
        mask_iou = self.fc_mask_iou(x)
        return mask_iou
    def loss(self, mask_iou_pred, mask_iou_targets):
        """Compute the IoU regression loss over positive targets only."""
        pos_inds = mask_iou_targets > 0
        if pos_inds.sum() > 0:
            loss_mask_iou = self.loss_iou(mask_iou_pred[pos_inds],
                                          mask_iou_targets[pos_inds])
        else:
            # No positives: keep the graph alive with a zero loss.
            loss_mask_iou = mask_iou_pred * 0
        return dict(loss_mask_iou=loss_mask_iou)
    def get_target(self, sampling_results, gt_masks, mask_pred, mask_targets,
                   rcnn_train_cfg):
        """Compute target of mask IoU.
        Mask IoU target is the IoU of the predicted mask (inside a bbox) and
        the gt mask of corresponding gt mask (the whole instance).
        The intersection area is computed inside the bbox, and the gt mask area
        is computed with two steps, firstly we compute the gt area inside the
        bbox, then divide it by the area ratio of gt area inside the bbox and
        the gt area of the whole instance.
        Args:
            sampling_results (list[:obj:`SamplingResult`]): sampling results.
            gt_masks (list[ndarray]): Gt masks (the whole instance) of each
                image, binary maps with the same shape of the input image.
            mask_pred (Tensor): Predicted masks of each positive proposal,
                shape (num_pos, h, w).
            mask_targets (Tensor): Gt mask of each positive proposal,
                binary map of the shape (num_pos, h, w).
            rcnn_train_cfg (dict): Training config for R-CNN part.
        Returns:
            Tensor: mask iou target (length == num positive).
        """
        pos_proposals = [res.pos_bboxes for res in sampling_results]
        pos_assigned_gt_inds = [
            res.pos_assigned_gt_inds for res in sampling_results
        ]
        # compute the area ratio of gt areas inside the proposals and
        # the whole instance
        area_ratios = map(self._get_area_ratio, pos_proposals,
                          pos_assigned_gt_inds, gt_masks)
        area_ratios = torch.cat(list(area_ratios))
        assert mask_targets.size(0) == area_ratios.size(0)
        mask_pred = (mask_pred > rcnn_train_cfg.mask_thr_binary).float()
        mask_pred_areas = mask_pred.sum((-1, -2))
        # mask_pred and mask_targets are binary maps
        overlap_areas = (mask_pred * mask_targets).sum((-1, -2))
        # compute the mask area of the whole instance
        gt_full_areas = mask_targets.sum((-1, -2)) / (area_ratios + 1e-7)
        mask_iou_targets = overlap_areas / (
            mask_pred_areas + gt_full_areas - overlap_areas)
        return mask_iou_targets
    def _get_area_ratio(self, pos_proposals, pos_assigned_gt_inds, gt_masks):
        """Compute area ratio of the gt mask inside the proposal and the gt
        mask of the corresponding instance"""
        num_pos = pos_proposals.size(0)
        if num_pos > 0:
            area_ratios = []
            proposals_np = pos_proposals.cpu().numpy()
            pos_assigned_gt_inds = pos_assigned_gt_inds.cpu().numpy()
            # compute mask areas of gt instances (batch processing for speedup)
            gt_instance_mask_area = gt_masks.sum((-1, -2))
            for i in range(num_pos):
                gt_mask = gt_masks[pos_assigned_gt_inds[i]]
                # crop the gt mask inside the proposal
                x1, y1, x2, y2 = proposals_np[i, :].astype(np.int32)
                gt_mask_in_proposal = gt_mask[y1:y2 + 1, x1:x2 + 1]
                ratio = gt_mask_in_proposal.sum() / (
                    gt_instance_mask_area[pos_assigned_gt_inds[i]] + 1e-7)
                area_ratios.append(ratio)
            area_ratios = torch.from_numpy(np.stack(area_ratios)).float().to(
                pos_proposals.device)
        else:
            area_ratios = pos_proposals.new_zeros((0, ))
        return area_ratios
    def get_mask_scores(self, mask_iou_pred, det_bboxes, det_labels):
        """Get the mask scores.
        mask_score = bbox_score * mask_iou

        Returns one array of scores per foreground class (num_classes - 1,
        since index 0 is background and labels are offset by +1).
        """
        inds = range(det_labels.size(0))
        mask_scores = mask_iou_pred[inds, det_labels +
                                    1] * det_bboxes[inds, -1]
        mask_scores = mask_scores.cpu().numpy()
        det_labels = det_labels.cpu().numpy()
        return [
            mask_scores[det_labels == i] for i in range(self.num_classes - 1)
        ]
|
Cream/CDARTS/CDARTS_detection/mmdet/models/mask_heads/maskiou_head.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/mask_heads/maskiou_head.py",
"repo_id": "Cream",
"token_count": 3627
}
| 271 |
from .single_level import SingleRoIExtractor
__all__ = ['SingleRoIExtractor']
|
Cream/CDARTS/CDARTS_detection/mmdet/models/roi_extractors/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/roi_extractors/__init__.py",
"repo_id": "Cream",
"token_count": 25
}
| 272 |
#include <torch/extension.h>
#include <cmath>
#include <vector>
// Forward declarations of the CUDA kernel launchers (implemented in the .cu
// file of this extension).
int MaskedIm2colForwardLaucher(const at::Tensor im, const int height,
                               const int width, const int channels,
                               const int kernel_h, const int kernel_w,
                               const int pad_h, const int pad_w,
                               const at::Tensor mask_h_idx,
                               const at::Tensor mask_w_idx, const int mask_cnt,
                               at::Tensor col);

int MaskedCol2imForwardLaucher(const at::Tensor col, const int height,
                               const int width, const int channels,
                               const at::Tensor mask_h_idx,
                               const at::Tensor mask_w_idx, const int mask_cnt,
                               at::Tensor im);

// Input validation helpers. Tensor::is_cuda() replaces the deprecated
// x.type().is_cuda() accessor.
#define CHECK_CUDA(x) TORCH_CHECK(x.is_cuda(), #x, " must be a CUDAtensor ")
#define CHECK_CONTIGUOUS(x) \
  TORCH_CHECK(x.is_contiguous(), #x, " must be contiguous ")
// do/while(0) makes the two checks behave as a single statement, so the
// macro stays safe inside un-braced if/else bodies.
#define CHECK_INPUT(x)   \
  do {                   \
    CHECK_CUDA(x);       \
    CHECK_CONTIGUOUS(x); \
  } while (0)
// Gathers kernel_h x kernel_w patches of `im` at the locations given by
// (mask_h_idx, mask_w_idx) into the column matrix `col` -- an im2col
// restricted to masked positions. All tensors must be contiguous CUDA
// tensors; the actual work happens in the CUDA launcher.
int masked_im2col_forward_cuda(const at::Tensor im, const at::Tensor mask_h_idx,
                               const at::Tensor mask_w_idx, const int kernel_h,
                               const int kernel_w, const int pad_h,
                               const int pad_w, at::Tensor col) {
  CHECK_INPUT(im);
  CHECK_INPUT(mask_h_idx);
  CHECK_INPUT(mask_w_idx);
  CHECK_INPUT(col);
  // im: (n, ic, h, w), kernel size (kh, kw)
  // kernel: (oc, ic * kh * kw), col: (kh * kw * ic, ow * oh)
  int channels = im.size(1);
  int height = im.size(2);
  int width = im.size(3);
  int mask_cnt = mask_h_idx.size(0);
  MaskedIm2colForwardLaucher(im, height, width, channels, kernel_h, kernel_w,
                             pad_h, pad_w, mask_h_idx, mask_w_idx, mask_cnt,
                             col);
  // Always returns 1; presumably errors surface via TORCH_CHECK / CUDA
  // error state in the launcher -- TODO confirm.
  return 1;
}
// Scatters the column matrix `col` back into image tensor `im` at the
// masked locations (inverse of masked_im2col_forward_cuda).
int masked_col2im_forward_cuda(const at::Tensor col,
                               const at::Tensor mask_h_idx,
                               const at::Tensor mask_w_idx, int height,
                               int width, int channels, at::Tensor im) {
  CHECK_INPUT(col);
  CHECK_INPUT(mask_h_idx);
  CHECK_INPUT(mask_w_idx);
  CHECK_INPUT(im);
  // im: (n, ic, h, w), kernel size (kh, kw)
  // kernel: (oc, ic * kh * kw), col: (kh * kw * ic, ow * oh)
  int mask_cnt = mask_h_idx.size(0);
  MaskedCol2imForwardLaucher(col, height, width, channels, mask_h_idx,
                             mask_w_idx, mask_cnt, im);
  return 1;
}
// Python bindings for the masked conv2d CUDA helpers.
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
  m.def("masked_im2col_forward", &masked_im2col_forward_cuda,
        "masked_im2col forward (CUDA)");
  m.def("masked_col2im_forward", &masked_col2im_forward_cuda,
        "masked_col2im forward (CUDA)");
}
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/masked_conv/src/masked_conv2d_cuda.cpp/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/masked_conv/src/masked_conv2d_cuda.cpp",
"repo_id": "Cream",
"token_count": 1532
}
| 273 |
import torch.nn as nn
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.nn.modules.utils import _pair
from . import roi_align_cuda
class RoIAlignFunction(Function):
    """Autograd wrapper around the custom CUDA RoIAlign kernels."""

    @staticmethod
    def forward(ctx, features, rois, out_size, spatial_scale, sample_num=0):
        """Pool each roi from `features` into a fixed (out_h, out_w) map.

        CUDA-only: raises NotImplementedError for CPU tensors.
        """
        out_h, out_w = _pair(out_size)
        assert isinstance(out_h, int) and isinstance(out_w, int)
        ctx.spatial_scale = spatial_scale
        ctx.sample_num = sample_num
        ctx.save_for_backward(rois)
        ctx.feature_size = features.size()
        batch_size, num_channels, data_height, data_width = features.size()
        num_rois = rois.size(0)
        output = features.new_zeros(num_rois, num_channels, out_h, out_w)
        if features.is_cuda:
            roi_align_cuda.forward(features, rois, out_h, out_w, spatial_scale,
                                   sample_num, output)
        else:
            raise NotImplementedError
        return output

    @staticmethod
    @once_differentiable
    def backward(ctx, grad_output):
        """Backprop to `features` only; rois and config args get no grad."""
        feature_size = ctx.feature_size
        spatial_scale = ctx.spatial_scale
        sample_num = ctx.sample_num
        rois = ctx.saved_tensors[0]
        assert (feature_size is not None and grad_output.is_cuda)
        batch_size, num_channels, data_height, data_width = feature_size
        out_w = grad_output.size(3)
        out_h = grad_output.size(2)
        grad_input = grad_rois = None
        if ctx.needs_input_grad[0]:
            grad_input = rois.new_zeros(batch_size, num_channels, data_height,
                                        data_width)
            roi_align_cuda.backward(grad_output.contiguous(), rois, out_h,
                                    out_w, spatial_scale, sample_num,
                                    grad_input)
        # One gradient slot per forward argument:
        # (features, rois, out_size, spatial_scale, sample_num).
        return grad_input, grad_rois, None, None, None


roi_align = RoIAlignFunction.apply
class RoIAlign(nn.Module):
    """RoIAlign pooling layer.

    Dispatches either to the bundled CUDA implementation or, when
    ``use_torchvision`` is set, to ``torchvision.ops.roi_align``.
    """

    def __init__(self,
                 out_size,
                 spatial_scale,
                 sample_num=0,
                 use_torchvision=False):
        super(RoIAlign, self).__init__()
        # Normalize configuration once up front.
        self.out_size = _pair(out_size)
        self.spatial_scale = float(spatial_scale)
        self.sample_num = int(sample_num)
        self.use_torchvision = use_torchvision

    def forward(self, features, rois):
        if not self.use_torchvision:
            return roi_align(features, rois, self.out_size, self.spatial_scale,
                             self.sample_num)
        # Imported lazily so torchvision stays an optional dependency.
        from torchvision.ops import roi_align as tv_roi_align
        return tv_roi_align(features, rois, self.out_size, self.spatial_scale,
                            self.sample_num)

    def __repr__(self):
        pieces = [
            self.__class__.__name__,
            '(out_size={}, spatial_scale={}, sample_num={}'.format(
                self.out_size, self.spatial_scale, self.sample_num),
            ', use_torchvision={})'.format(self.use_torchvision),
        ]
        return ''.join(pieces)
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_align/roi_align.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_align/roi_align.py",
"repo_id": "Cream",
"token_count": 1502
}
| 274 |
import argparse
import json
from collections import defaultdict
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
def cal_train_time(log_dicts, args):
    """Print per-epoch iteration-time statistics for each parsed train log."""
    for log_idx, log_dict in enumerate(log_dicts):
        print('{}Analyze train time of {}{}'.format('-' * 5,
                                                    args.json_logs[log_idx],
                                                    '-' * 5))
        # Unless outliers are requested, drop the first recorded time of
        # every epoch (it usually includes warm-up overhead).
        if args.include_outliers:
            per_epoch = [log_dict[e]['time'] for e in log_dict.keys()]
        else:
            per_epoch = [log_dict[e]['time'][1:] for e in log_dict.keys()]
        all_times = np.array(per_epoch)
        epoch_ave_time = all_times.mean(-1)
        slowest_epoch = epoch_ave_time.argmax()
        fastest_epoch = epoch_ave_time.argmin()
        std_over_epoch = epoch_ave_time.std()
        print('slowest epoch {}, average time is {:.4f}'.format(
            slowest_epoch + 1, epoch_ave_time[slowest_epoch]))
        print('fastest epoch {}, average time is {:.4f}'.format(
            fastest_epoch + 1, epoch_ave_time[fastest_epoch]))
        print('time std over epochs is {:.4f}'.format(std_over_epoch))
        print('average iter time: {:.4f} s/iter'.format(np.mean(all_times)))
        print()
def plot_curve(log_dicts, args):
    """Plot the requested metrics of each train log with matplotlib.

    mAP-style metrics are plotted once per epoch; every other metric is
    plotted per iteration. Saves to ``args.out`` if given, else shows the
    figure interactively.
    """
    if args.backend is not None:
        plt.switch_backend(args.backend)
    sns.set_style(args.style)
    # if legend is None, use (unknown)_{key} as legend
    legend = args.legend
    if legend is None:
        legend = []
        for json_log in args.json_logs:
            for metric in args.keys:
                legend.append('{}_{}'.format(json_log, metric))
    assert len(legend) == (len(args.json_logs) * len(args.keys))
    metrics = args.keys
    num_metrics = len(metrics)
    for i, log_dict in enumerate(log_dicts):
        epochs = list(log_dict.keys())
        for j, metric in enumerate(metrics):
            print('plot curve of {}, metric is {}'.format(
                args.json_logs[i], metric))
            assert metric in log_dict[epochs[
                0]], '{} does not contain metric {}'.format(
                    args.json_logs[i], metric)
            if 'mAP' in metric:
                # Evaluation metrics are logged once per epoch.
                xs = np.arange(1, max(epochs) + 1)
                ys = []
                for epoch in epochs:
                    ys += log_dict[epoch][metric]
                ax = plt.gca()
                ax.set_xticks(xs)
                plt.xlabel('epoch')
                plt.plot(xs, ys, label=legend[i * num_metrics + j], marker='o')
            else:
                # Training metrics are logged per iteration; convert epoch-local
                # iteration counters into a global iteration axis.
                xs = []
                ys = []
                num_iters_per_epoch = log_dict[epochs[0]]['iter'][-1]
                for epoch in epochs:
                    iters = log_dict[epoch]['iter']
                    # Drop the trailing validation record: it has no
                    # corresponding training-metric value.
                    if log_dict[epoch]['mode'][-1] == 'val':
                        iters = iters[:-1]
                    xs.append(
                        np.array(iters) + (epoch - 1) * num_iters_per_epoch)
                    ys.append(np.array(log_dict[epoch][metric][:len(iters)]))
                xs = np.concatenate(xs)
                ys = np.concatenate(ys)
                plt.xlabel('iter')
                plt.plot(
                    xs, ys, label=legend[i * num_metrics + j], linewidth=0.5)
            plt.legend()
        if args.title is not None:
            plt.title(args.title)
    if args.out is None:
        plt.show()
    else:
        print('save curve to: {}'.format(args.out))
        plt.savefig(args.out)
        plt.cla()
def add_plot_parser(subparsers):
    """Register the ``plot_curve`` sub-command and its options."""
    plot_parser = subparsers.add_parser(
        'plot_curve', help='parser for plotting curves')
    plot_parser.add_argument(
        'json_logs', type=str, nargs='+',
        help='path of train log in json format')
    plot_parser.add_argument(
        '--keys', type=str, nargs='+', default=['bbox_mAP'],
        help='the metric that you want to plot')
    plot_parser.add_argument('--title', type=str, help='title of figure')
    plot_parser.add_argument(
        '--legend', type=str, nargs='+', default=None,
        help='legend of each plot')
    plot_parser.add_argument(
        '--backend', type=str, default=None, help='backend of plt')
    plot_parser.add_argument(
        '--style', type=str, default='dark', help='style of plt')
    plot_parser.add_argument('--out', type=str, default=None)
def add_time_parser(subparsers):
    """Register the ``cal_train_time`` sub-command and its options."""
    time_parser = subparsers.add_parser(
        'cal_train_time',
        help='parser for computing the average time per training iteration')
    time_parser.add_argument(
        'json_logs', type=str, nargs='+',
        help='path of train log in json format')
    time_parser.add_argument(
        '--include-outliers', action='store_true',
        help='include the first value of every epoch when computing '
        'the average time')
def parse_args():
    """Build the command-line interface and parse ``sys.argv``."""
    parser = argparse.ArgumentParser(description='Analyze Json Log')
    # currently only support plot curve and calculate average train time
    task_subparsers = parser.add_subparsers(dest='task', help='task parser')
    add_plot_parser(task_subparsers)
    add_time_parser(task_subparsers)
    return parser.parse_args()
def load_json_logs(json_logs):
    """Parse one-JSON-object-per-line train logs.

    Returns one dict per input file, keyed by epoch; each value maps a
    metric name (e.g. ``memory``, ``bbox_mAP``) to the list of values it
    took over the iterations of that epoch.
    """
    log_dicts = []
    for json_log in json_logs:
        log_dict = dict()
        with open(json_log, 'r') as log_file:
            for line in log_file:
                record = json.loads(line.strip())
                epoch = record.pop('epoch')
                epoch_metrics = log_dict.setdefault(epoch, defaultdict(list))
                for key, value in record.items():
                    epoch_metrics[key].append(value)
        log_dicts.append(log_dict)
    return log_dicts
def main():
    """Dispatch the selected analysis task on the given JSON train logs.

    Raises:
        ValueError: if an input path does not end with ``.json`` or the
            selected task is unknown.
    """
    args = parse_args()
    json_logs = args.json_logs
    for json_log in json_logs:
        # `raise` instead of `assert`: asserts are stripped under `python -O`.
        if not json_log.endswith('.json'):
            raise ValueError(
                'expected a .json train log, got {}'.format(json_log))
    log_dicts = load_json_logs(json_logs)
    # Explicit dispatch instead of eval(args.task): no arbitrary-code
    # evaluation, and unknown tasks fail with a clear message.
    tasks = {'plot_curve': plot_curve, 'cal_train_time': cal_train_time}
    try:
        task_fn = tasks[args.task]
    except KeyError:
        raise ValueError('unknown task: {}'.format(args.task))
    task_fn(log_dicts, args)


if __name__ == '__main__':
    main()
|
Cream/CDARTS/CDARTS_detection/tools/analyze_logs.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/tools/analyze_logs.py",
"repo_id": "Cream",
"token_count": 3122
}
| 275 |
import matplotlib.pyplot as plt
import numpy as np
import torch
def decode_seg_map_sequence(label_masks, dataset='pascal'):
    """Colourize a batch of label masks.

    Args:
        label_masks: iterable of (H, W) integer class masks.
        dataset (str): palette selector forwarded to ``decode_segmap``.

    Returns:
        torch.Tensor of shape (N, 3, H, W) with RGB values in [0, 1].
    """
    decoded = [decode_segmap(mask, dataset) for mask in label_masks]
    # (N, H, W, 3) -> (N, 3, H, W): channel-first layout for torch.
    return torch.from_numpy(np.array(decoded).transpose([0, 3, 1, 2]))
def decode_segmap(label_mask, dataset, plot=False):
    """Decode segmentation class labels into a color image.

    Args:
        label_mask (np.ndarray): an (M, N) array of integer values denoting
            the class label at each spatial location.
        dataset (str): one of 'pascal', 'coco', 'cityscapes', 'kd'.
        plot (bool, optional): whether to show the resulting color image
            in a figure instead of returning it.

    Returns:
        (np.ndarray, optional): the resulting (M, N, 3) decoded color image
        with values in [0, 1]; None when ``plot`` is True.
    """
    if dataset in ('pascal', 'coco'):
        n_classes = 21
        label_colours = get_pascal_labels()
    elif dataset in ('cityscapes', 'kd'):
        n_classes = 19
        label_colours = get_cityscapes_labels()
    else:
        raise NotImplementedError
    # Start from copies of the mask so labels outside [0, n_classes) keep
    # their raw value (matching the historical behavior of this helper).
    channels = [label_mask.copy(), label_mask.copy(), label_mask.copy()]
    for cls in range(0, n_classes):
        selected = label_mask == cls
        for c in range(3):
            channels[c][selected] = label_colours[cls, c]
    rgb = np.zeros((label_mask.shape[0], label_mask.shape[1], 3))
    for c in range(3):
        rgb[:, :, c] = channels[c] / 255.0
    if plot:
        plt.imshow(rgb)
        plt.show()
    else:
        return rgb
def encode_segmap(mask):
    """Encode segmentation label images as pascal classes.

    Args:
        mask (np.ndarray): raw segmentation label image of dimension
            (M, N, 3), in which the Pascal classes are encoded as colours.

    Returns:
        (np.ndarray): class map with dimensions (M, N), where the value at
        a given location is the integer denoting the class index.
    """
    mask = mask.astype(int)
    label_mask = np.zeros((mask.shape[0], mask.shape[1]), dtype=np.int16)
    for class_idx, colour in enumerate(get_pascal_labels()):
        # Pixels whose RGB triple matches this class colour exactly.
        matches = np.where(np.all(mask == colour, axis=-1))[:2]
        label_mask[matches] = class_idx
    return label_mask.astype(int)
def get_cityscapes_labels():
    """Return the 19 Cityscapes train-class colours as an (19, 3) array."""
    colours = [
        (128, 64, 128),   # road
        (244, 35, 232),   # sidewalk
        (70, 70, 70),     # building
        (102, 102, 156),  # wall
        (190, 153, 153),  # fence
        (153, 153, 153),  # pole
        (250, 170, 30),   # traffic light
        (220, 220, 0),    # traffic sign
        (107, 142, 35),   # vegetation
        (152, 251, 152),  # terrain
        (0, 130, 180),    # sky
        (220, 20, 60),    # person
        (255, 0, 0),      # rider
        (0, 0, 142),      # car
        (0, 0, 70),       # truck
        (0, 60, 100),     # bus
        (0, 80, 100),     # train
        (0, 0, 230),      # motorcycle
        (119, 11, 32),    # bicycle
    ]
    return np.array(colours)
def get_pascal_labels():
    """Load the mapping that associates pascal classes with label colors.

    The colours are the standard VOC colormap, generated by spreading the
    bits of each class index across the R, G and B channels.

    Returns:
        np.ndarray with dimensions (21, 3)
    """
    colormap = np.zeros((21, 3), dtype=int)
    for class_idx in range(21):
        r = g = b = 0
        value = class_idx
        for shift in range(8):
            r |= ((value >> 0) & 1) << (7 - shift)
            g |= ((value >> 1) & 1) << (7 - shift)
            b |= ((value >> 2) & 1) << (7 - shift)
            value >>= 3
        colormap[class_idx] = (r, g, b)
    return colormap
|
Cream/CDARTS/CDARTS_segmentation/dataloaders/dataloader_utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/dataloaders/dataloader_utils.py",
"repo_id": "Cream",
"token_count": 1646
}
| 276 |
# ------------------------------------------------------------------------------
# Builds transformation before data augmentation.
# Written by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import warnings
import cv2
import math
import numpy as np
class Resize(object):
    """
    Applies random scale augmentation.
    Reference: https://github.com/tensorflow/models/blob/master/research/deeplab/input_preprocess.py#L28
    Arguments:
        min_resize_value: Desired size of the smaller image side, no resize if set to None
        max_resize_value: Maximum allowed size of the larger image side, no limit if set to None
        resize_factor: Resized dimensions are multiple of factor plus one.
        keep_aspect_ratio: Boolean, keep aspect ratio or not. If True, the input
            will be resized while keeping the original aspect ratio. If False, the
            input will be resized to [max_resize_value, max_resize_value] without
            keeping the original aspect ratio.
        align_corners: If True, exactly align all 4 corners of input and output.
    """

    def __init__(self, min_resize_value=None, max_resize_value=None, resize_factor=None,
                 keep_aspect_ratio=True, align_corners=False):
        # Negative values mean "disabled", same as None.
        if min_resize_value is not None and min_resize_value < 0:
            min_resize_value = None
        if max_resize_value is not None and max_resize_value < 0:
            max_resize_value = None
        if resize_factor is not None and resize_factor < 0:
            resize_factor = None
        self.min_resize_value = min_resize_value
        self.max_resize_value = max_resize_value
        self.resize_factor = resize_factor
        self.keep_aspect_ratio = keep_aspect_ratio
        self.align_corners = align_corners
        if self.align_corners:
            warnings.warn('`align_corners = True` is not supported by opencv.')
        if self.max_resize_value is not None:
            # Modify the max_size to be a multiple of factor plus 1 and make sure the max dimension after resizing
            # is no larger than max_size.
            if self.resize_factor is not None:
                self.max_resize_value = (self.max_resize_value - (self.max_resize_value - 1) % self.resize_factor)

    def __call__(self, image, label):
        """Resize `image` (H, W, C) and `label` (H, W) consistently.

        Returns the inputs unchanged when `min_resize_value` is None;
        otherwise returns resized copies with the original dtypes.
        """
        if self.min_resize_value is None:
            return image, label
        [orig_height, orig_width, _] = image.shape
        orig_min_size = np.minimum(orig_height, orig_width)
        # Calculate the larger of the possible sizes
        large_scale_factor = self.min_resize_value / orig_min_size
        large_height = int(math.floor(orig_height * large_scale_factor))
        large_width = int(math.floor(orig_width * large_scale_factor))
        large_size = np.array([large_height, large_width])
        new_size = large_size
        if self.max_resize_value is not None:
            # Calculate the smaller of the possible sizes, use that if the larger is too big.
            orig_max_size = np.maximum(orig_height, orig_width)
            small_scale_factor = self.max_resize_value / orig_max_size
            small_height = int(math.floor(orig_height * small_scale_factor))
            small_width = int(math.floor(orig_width * small_scale_factor))
            small_size = np.array([small_height, small_width])
            if np.max(large_size) > self.max_resize_value:
                new_size = small_size
            # Ensure that both output sides are multiples of factor plus one.
            if self.resize_factor is not None:
                new_size += (self.resize_factor - (new_size - 1) % self.resize_factor) % self.resize_factor
                # If new_size exceeds largest allowed size
                new_size[new_size > self.max_resize_value] -= self.resize_factor
        if not self.keep_aspect_ratio:
            # If not keep the aspect ratio, we resize everything to max_size, allowing
            # us to do pre-processing without extra padding.
            new_size = [np.max(new_size), np.max(new_size)]
        # TODO: cv2 uses align_corner=False
        # TODO: use fvcore (https://github.com/facebookresearch/fvcore/blob/master/fvcore/transforms/transform.py#L377)
        image_dtype = image.dtype
        label_dtype = label.dtype
        # cv2: (width, height). np.float64 replaces np.float, which was a
        # plain alias of builtin float and was removed in NumPy 1.24.
        image = cv2.resize(image.astype(np.float64), (new_size[1], new_size[0]), interpolation=cv2.INTER_LINEAR)
        label = cv2.resize(label.astype(np.float64), (new_size[1], new_size[0]), interpolation=cv2.INTER_NEAREST)
        return image.astype(image_dtype), label.astype(label_dtype)
|
Cream/CDARTS/CDARTS_segmentation/dataloaders/transforms/pre_augmentation_transforms.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/dataloaders/transforms/pre_augmentation_transforms.py",
"repo_id": "Cream",
"token_count": 1863
}
| 277 |
# ------------------------------------------------------------------------------
# Reference: https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/samplers/distributed_sampler.py
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import itertools
import math
from collections import defaultdict
from typing import Optional
import torch
from torch.utils.data.sampler import Sampler
from segmentation.utils import comm
class TrainingSampler(Sampler):
    """
    In training, we only care about the "infinite stream" of training data.
    So this sampler produces an infinite stream of indices and
    all workers cooperate to correctly shuffle the indices and sample different indices.
    The samplers in each worker effectively produces `indices[worker_id::num_workers]`
    where `indices` is an infinite stream of indices consisting of
    `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True)
    or `range(size) + range(size) + ...` (if shuffle is False)
    """

    def __init__(self, size, shuffle=True, seed=None):
        """
        Args:
            size (int): the total number of data of the underlying dataset to sample from
            shuffle (bool): whether to shuffle the indices or not
            seed (int): the initial seed of the shuffle. Must be the same
                across all workers. If None, will use a random seed shared
                among workers (require synchronization among all workers).
        """
        self._size = size
        assert size > 0
        self._shuffle = shuffle
        # A shared seed keeps the permutation identical across workers, so
        # the rank-strided slicing below partitions it without overlap.
        self._seed = int(seed if seed is not None else comm.shared_random_seed())
        self._rank = comm.get_rank()
        self._world_size = comm.get_world_size()

    def __iter__(self):
        # Each rank takes every world_size-th index, offset by its rank.
        yield from itertools.islice(self._infinite_indices(), self._rank,
                                    None, self._world_size)

    def __len__(self):
        return self._size

    def _infinite_indices(self):
        rng = torch.Generator()
        rng.manual_seed(self._seed)
        while True:
            if self._shuffle:
                yield from torch.randperm(self._size, generator=rng)
            else:
                yield from torch.arange(self._size)
class InferenceSampler(Sampler):
    """
    Produce indices for inference.
    Inference needs to run on the __exact__ set of samples,
    therefore when the total number of samples is not divisible by the number of workers,
    this sampler produces different number of samples on different workers.
    """

    def __init__(self, size):
        """
        Args:
            size (int): the total number of data of the underlying dataset to sample from
        """
        assert size > 0
        self._size = size
        self._rank = comm.get_rank()
        self._world_size = comm.get_world_size()
        # ceil(size / world_size) indices per shard; the last shard may
        # receive fewer so every sample is covered exactly once.
        per_shard = (self._size - 1) // self._world_size + 1
        lo = per_shard * self._rank
        hi = min(per_shard * (self._rank + 1), self._size)
        self._local_indices = range(lo, hi)

    def __iter__(self):
        yield from self._local_indices

    def __len__(self):
        return len(self._local_indices)
|
Cream/CDARTS/CDARTS_segmentation/segmentation/data/samplers/distributed_sampler.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/data/samplers/distributed_sampler.py",
"repo_id": "Cream",
"token_count": 1195
}
| 278 |
# ------------------------------------------------------------------------------
# Reference: https://github.com/pytorch/vision/blob/master/torchvision/models/mobilenet.py
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
from torch import nn
from torchvision.models.utils import load_state_dict_from_url
__all__ = ['MobileNetV2', 'mobilenet_v2']
model_urls = {
'mobilenet_v2': 'https://download.pytorch.org/models/mobilenet_v2-b0353104.pth',
}
def _make_divisible(v, divisor, min_value=None):
"""
This function is taken from the original tf repo.
It ensures that all layers have a channel number that is divisible by 8
It can be seen here:
https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
:param v:
:param divisor:
:param min_value:
:return:
"""
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v
class ConvBNReLU(nn.Sequential):
    """Conv2d (no bias) + BatchNorm2d + ReLU6 block."""

    def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
        # Padding preserves spatial size for odd kernels at stride 1.
        same_pad = (kernel_size - 1) // 2
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride, same_pad,
                         groups=groups, bias=False)
        super(ConvBNReLU, self).__init__(conv, nn.BatchNorm2d(out_planes),
                                         nn.ReLU6(inplace=True))
class InvertedResidual(nn.Module):
    """MobileNetV2 inverted residual: expand -> depthwise -> linear project."""

    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]
        hidden_dim = int(round(inp * expand_ratio))
        # The residual shortcut is valid only when input/output shapes match.
        self.use_res_connect = self.stride == 1 and inp == oup
        layers = []
        if expand_ratio != 1:
            # 1x1 pointwise expansion
            layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1))
        # 3x3 depthwise conv followed by a linear 1x1 projection
        layers += [
            ConvBNReLU(hidden_dim, hidden_dim, stride=stride, groups=hidden_dim),
            nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
            nn.BatchNorm2d(oup),
        ]
        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv(x)
        return x + out if self.use_res_connect else out
class MobileNetV2(nn.Module):
    """MobileNetV2 backbone that returns every intermediate feature map.

    Unlike the torchvision reference, the classifier head and the final
    1x1 conv are removed (kept below as commented-out code), so the model
    is usable as a dense-prediction backbone.
    """

    def __init__(self,
                 width_mult=1.0,
                 inverted_residual_setting=None,
                 round_nearest=8,
                 block=None):
        """
        MobileNet V2 main class
        Args:
            width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
            inverted_residual_setting: Network structure
            round_nearest (int): Round the number of channels in each layer to be a multiple of this number
            Set to 1 to turn off rounding
            block: Module specifying inverted residual building block for mobilenet
        """
        super(MobileNetV2, self).__init__()
        if block is None:
            block = InvertedResidual
        input_channel = 32
        last_channel = 1280
        if inverted_residual_setting is None:
            # t = expansion factor, c = output channels,
            # n = number of blocks, s = stride of the first block
            inverted_residual_setting = [
                # t, c, n, s
                [1, 16, 1, 1],
                [6, 24, 2, 2],
                [6, 32, 3, 2],
                [6, 64, 4, 2],
                [6, 96, 3, 1],
                [6, 160, 3, 2],
                [6, 320, 1, 1],
            ]
        # Mapping to the TF-slim reference layers, for checkpoint alignment:
        # op(slim.conv2d, stride=2, num_outputs=32, kernel_size=[3, 3]), layer_1
        # op(ops.expanded_conv, expansion_size=expand_input(1, divisible_by=1), num_outputs=16), layer_2
        # op(ops.expanded_conv, stride=2, num_outputs=24), layer_3
        # op(ops.expanded_conv, stride=1, num_outputs=24), layer_4
        # op(ops.expanded_conv, stride=2, num_outputs=32), layer_5
        # op(ops.expanded_conv, stride=1, num_outputs=32), layer_6
        # op(ops.expanded_conv, stride=1, num_outputs=32), layer_7
        # op(ops.expanded_conv, stride=2, num_outputs=64), layer_8
        # op(ops.expanded_conv, stride=1, num_outputs=64), layer_9
        # op(ops.expanded_conv, stride=1, num_outputs=64), layer_10
        # op(ops.expanded_conv, stride=1, num_outputs=64), layer_11
        # op(ops.expanded_conv, stride=1, num_outputs=96), layer_12
        # op(ops.expanded_conv, stride=1, num_outputs=96), layer_13
        # op(ops.expanded_conv, stride=1, num_outputs=96), layer_14
        # op(ops.expanded_conv, stride=2, num_outputs=160), layer_15
        # op(ops.expanded_conv, stride=1, num_outputs=160), layer_16
        # op(ops.expanded_conv, stride=1, num_outputs=160), layer_17
        # op(ops.expanded_conv, stride=1, num_outputs=320), layer_18 ==> use this
        # op(slim.conv2d, stride=1, kernel_size=[1, 1], num_outputs=1280) layer_19
        # only check the first element, assuming user knows t,c,n,s are required
        if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4:
            raise ValueError("inverted_residual_setting should be non-empty "
                             "or a 4-element list, got {}".format(inverted_residual_setting))
        # building first layer
        input_channel = _make_divisible(input_channel * width_mult, round_nearest)
        self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
        features = [ConvBNReLU(3, input_channel, stride=2)]
        # building inverted residual blocks
        for t, c, n, s in inverted_residual_setting:
            output_channel = _make_divisible(c * width_mult, round_nearest)
            for i in range(n):
                stride = s if i == 0 else 1
                features.append(block(input_channel, output_channel, stride, expand_ratio=t))
                input_channel = output_channel
        # building last several layers
        # features.append(ConvBNReLU(input_channel, self.last_channel, kernel_size=1))
        # make it nn.Sequential
        self.features = nn.Sequential(*features)
        # building classifier
        # self.classifier = nn.Sequential(
        #     nn.Dropout(0.2),
        #     nn.Linear(self.last_channel, num_classes),
        # )
        # weight initialization
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    nn.init.zeros_(m.bias)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.ones_(m.weight)
                nn.init.zeros_(m.bias)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.zeros_(m.bias)

    def _forward_impl(self, x):
        """Run the backbone, collecting each stage output as 'layer_<i>'."""
        outputs = {}
        # This exists since TorchScript doesn't support inheritance, so the superclass method
        # (this one) needs to have a name other than `forward` that can be accessed in a subclass
        # x = self.features(x)
        for i, module in enumerate(self.features):
            x = module(x)
            outputs['layer_%d' % (i + 1)] = x
        # Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
        # x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
        # x = self.classifier(x)
        # return x
        return outputs

    def forward(self, x):
        """Return a dict mapping 'layer_<i>' to the i-th stage feature map."""
        return self._forward_impl(x)
def mobilenet_v2(pretrained=False, progress=True, **kwargs):
    """
    Constructs a MobileNetV2 architecture from
    `"MobileNetV2: Inverted Residuals and Linear Bottlenecks" <https://arxiv.org/abs/1801.04381>`_.
    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    model = MobileNetV2(**kwargs)
    if not pretrained:
        return model
    weights = load_state_dict_from_url(model_urls['mobilenet_v2'],
                                       progress=progress)
    # strict=False: this backbone drops the classifier head that is
    # present in the released checkpoint.
    model.load_state_dict(weights, strict=False)
    return model
if __name__ == '__main__':
    # Smoke test: build the backbone and print the size of every
    # intermediate feature map for a dummy 224x224 input.
    import torch
    model = mobilenet_v2(pretrained=False)
    print(model)
    data = torch.zeros(1, 3, 224, 224)
    results = model.forward(data)
    for key in results.keys():
        print(key, results[key].size())
|
Cream/CDARTS/CDARTS_segmentation/segmentation/model/backbone/mobilenet.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/model/backbone/mobilenet.py",
"repo_id": "Cream",
"token_count": 3967
}
| 279 |
# ------------------------------------------------------------------------------
# Panoptic-DeepLab meta architecture.
# Written by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
from collections import OrderedDict
import torch
from torch import nn
from torch.nn import functional as F
from .base import BaseSegmentationModel
from segmentation.model.decoder import PanopticDeepLabDecoder
from segmentation.utils import AverageMeter
__all__ = ["PanopticDeepLab"]
class PanopticDeepLab(BaseSegmentationModel):
"""
Implements Panoptic-DeepLab model from
`"Panoptic-DeepLab: A Simple, Strong, and Fast Baseline for Bottom-Up Panoptic Segmentation"
<https://arxiv.org/abs/1911.10194>`_.
Arguments:
backbone (nn.Module): the network used to compute the features for the model.
The backbone should return an OrderedDict[Tensor], with the key being
"out" for the last feature map used, and "aux" if an auxiliary classifier
is used.
in_channels (int): number of input channels from the backbone
feature_key (str): names of input feature from backbone
low_level_channels (list): a list of channels of low-level features
low_level_key (list): a list of name of low-level features used in decoder
low_level_channels_project (list): a list of channels of low-level features after projection in decoder
decoder_channels (int): number of channels in decoder
atrous_rates (tuple): atrous rates for ASPP
num_classes (int): number of classes
semantic_loss (nn.Module): loss function
semantic_loss_weight (float): loss weight
center_loss (nn.Module): loss function
center_loss_weight (float): loss weight
offset_loss (nn.Module): loss function
offset_loss_weight (float): loss weight
**kwargs: arguments for instance head
"""
def __init__(self, backbone, in_channels, feature_key, low_level_channels, low_level_key,
low_level_channels_project, decoder_channels, atrous_rates, num_classes,
semantic_loss, semantic_loss_weight, center_loss, center_loss_weight,
offset_loss, offset_loss_weight, **kwargs):
decoder = PanopticDeepLabDecoder(in_channels, feature_key, low_level_channels, low_level_key,
low_level_channels_project, decoder_channels, atrous_rates, num_classes,
**kwargs)
super(PanopticDeepLab, self).__init__(backbone, decoder)
self.semantic_loss = semantic_loss
self.semantic_loss_weight = semantic_loss_weight
self.loss_meter_dict = OrderedDict()
self.loss_meter_dict['Loss'] = AverageMeter()
self.loss_meter_dict['Semantic loss'] = AverageMeter()
if kwargs.get('has_instance', False):
self.center_loss = center_loss
self.center_loss_weight = center_loss_weight
self.offset_loss = offset_loss
self.offset_loss_weight = offset_loss_weight
self.loss_meter_dict['Center loss'] = AverageMeter()
self.loss_meter_dict['Offset loss'] = AverageMeter()
else:
self.center_loss = None
self.center_loss_weight = 0
self.offset_loss = None
self.offset_loss_weight = 0
# Initialize parameters.
self._init_params()
def _upsample_predictions(self, pred, input_shape):
"""Upsamples final prediction, with special handling to offset.
Args:
pred (dict): stores all output of the segmentation model.
input_shape (tuple): spatial resolution of the desired shape.
Returns:
result (OrderedDict): upsampled dictionary.
"""
# Override upsample method to correctly handle `offset`
result = OrderedDict()
for key in pred.keys():
out = F.interpolate(pred[key], size=input_shape, mode='bilinear', align_corners=True)
if 'offset' in key:
scale = (input_shape[0] - 1) // (pred[key].shape[2] - 1)
out *= scale
result[key] = out
return result
    def loss(self, results, targets=None):
        """Compute the weighted training loss and attach it to ``results``.

        Args:
            results (dict): model outputs; must contain 'semantic', plus
                'center'/'offset' when the instance branch is enabled.
            targets (dict or None): ground-truth maps and optional per-pixel
                weights ('semantic_weights', 'center_weights',
                'offset_weights').

        Returns:
            ``results`` with a 'loss' entry added when ``targets`` is given;
            returned unchanged (no 'loss' key) when ``targets`` is None.
        """
        batch_size = results['semantic'].size(0)
        loss = 0
        if targets is not None:
            if 'semantic_weights' in targets.keys():
                semantic_loss = self.semantic_loss(
                    results['semantic'], targets['semantic'], semantic_weights=targets['semantic_weights']
                ) * self.semantic_loss_weight
            else:
                semantic_loss = self.semantic_loss(
                    results['semantic'], targets['semantic']) * self.semantic_loss_weight
            self.loss_meter_dict['Semantic loss'].update(semantic_loss.detach().cpu().item(), batch_size)
            loss += semantic_loss
            if self.center_loss is not None:
                # Pixel-wise loss weight
                center_loss_weights = targets['center_weights'][:, None, :, :].expand_as(results['center'])
                center_loss = self.center_loss(results['center'], targets['center']) * center_loss_weights
                # safe division: with no weighted pixel, keep a zero loss that
                # still participates in the autograd graph
                if center_loss_weights.sum() > 0:
                    center_loss = center_loss.sum() / center_loss_weights.sum() * self.center_loss_weight
                else:
                    center_loss = center_loss.sum() * 0
                self.loss_meter_dict['Center loss'].update(center_loss.detach().cpu().item(), batch_size)
                loss += center_loss
            if self.offset_loss is not None:
                # Pixel-wise loss weight
                offset_loss_weights = targets['offset_weights'][:, None, :, :].expand_as(results['offset'])
                offset_loss = self.offset_loss(results['offset'], targets['offset']) * offset_loss_weights
                # safe division (same zero-weight handling as above)
                if offset_loss_weights.sum() > 0:
                    offset_loss = offset_loss.sum() / offset_loss_weights.sum() * self.offset_loss_weight
                else:
                    offset_loss = offset_loss.sum() * 0
                self.loss_meter_dict['Offset loss'].update(offset_loss.detach().cpu().item(), batch_size)
                loss += offset_loss
            # In distributed DataParallel, this is the loss on one machine, need to average the loss again
            # in train loop.
            results['loss'] = loss
            self.loss_meter_dict['Loss'].update(loss.detach().cpu().item(), batch_size)
        return results
|
Cream/CDARTS/CDARTS_segmentation/segmentation/model/meta_arch/panoptic_deeplab.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/model/meta_arch/panoptic_deeplab.py",
"repo_id": "Cream",
"token_count": 2832
}
| 280 |
# ------------------------------------------------------------------------------
# Utility functions for multi-scale testing.
# Written by Pingjun (https://github.com/bowenc0221/panoptic-deeplab/issues/25)
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import cv2
from collections import OrderedDict
import numpy as np
import torch
import torch.nn.functional as F
import segmentation.data.transforms.transforms as T
def flip_tensor(x, dim):
    """Return a copy of ``x`` reversed along dimension ``dim``.

    Negative ``dim`` values index from the last dimension, as usual.
    """
    if dim < 0:
        dim = x.dim() + dim
    index = []
    for axis in range(x.dim()):
        if axis == dim:
            # Descending index selects the elements in reverse order.
            index.append(torch.arange(x.size(axis) - 1, -1, -1).long())
        else:
            index.append(slice(None, None))
    return x[tuple(index)]
def upsample_predictions(pred, input_shape, scale):
    """Bilinearly upsample every prediction map to ``input_shape``.

    Offset maps encode pixel displacements (second dim order is
    (offset_y, offset_x)), so after resizing they are divided by ``scale``
    to keep displacement magnitudes consistent with the new resolution.
    """
    result = OrderedDict()
    for key, value in pred.items():
        resized = F.interpolate(value, size=input_shape, mode='bilinear', align_corners=True)
        if 'offset' in key:
            resized = resized * (1.0 / scale)
        result[key] = resized
    return result
def get_semantic_segmentation(sem):
    """
    Post-processing for semantic segmentation branch.
    Arguments:
        sem: A Tensor of shape [N, C, H, W]; only N=1 is supported.
    Returns:
        A Tensor of shape [1, H, W] with the argmax class per pixel
        (to be gathered by distributed data parallel).
    Raises:
        ValueError, if batch size is not 1.
    """
    if sem.size(0) != 1:
        raise ValueError('Only supports inference for batch size = 1')
    return torch.argmax(sem.squeeze(0), dim=0, keepdim=True)
def multi_scale_inference(config, model, raw_image, t_image, device):
    """Run (optionally flipped) multi-scale inference and average the maps.

    Args:
        config: experiment config; reads TEST.SCALE_LIST, TEST.FLIP_TEST and
            DATASET.MEAN / DATASET.STD.
        model: segmentation model returning a dict with 'semantic', 'center'
            and 'offset' entries.
        raw_image: HWC image array, resized per scale with OpenCV.
        t_image: NCHW tensor of the original image; only its spatial size is
            used to fix the common padded output resolution.
        device: torch device to run inference on.

    Returns:
        The last scale's output dict with 'semantic', 'center' and 'offset'
        replaced by their averages over all scales and flips.
    """
    scales = config.TEST.SCALE_LIST
    flip = config.TEST.FLIP_TEST
    # output_stride = 2 ** (5 - sum(config.MODEL.BACKBONE.DILATION))
    # train_crop_h, train_crop_w = config.TEST.CROP_SIZE
    # scale = 1. / output_stride
    # pool_h = int((float(train_crop_h) - 1.0) * scale + 1.0)
    # pool_w = int((float(train_crop_w) - 1.0) * scale + 1.0)
    # transforms
    transforms = T.Compose(
        [
            T.ToTensor(),
            T.Normalize(config.DATASET.MEAN, config.DATASET.STD)
        ]
    )
    # Two passes (original + horizontal flip) when flip testing is enabled.
    # NOTE(review): the loop below reuses the name ``flip`` as its index; the
    # flag is only needed to compute ``flip_range`` here.
    if flip:
        flip_range = 2
    else:
        flip_range = 1
    # h,w,_ = raw_image.shape
    _, _, h, w = t_image.shape
    # Common output resolution: original size padded up to a multiple of 32.
    org_h_pad = (h + 31) // 32 * 32
    org_w_pad = (w + 31) // 32 * 32
    sum_semantic_with_flip = 0
    sum_center_with_flip = 0
    sum_offset_with_flip = 0
    for i in range(len(scales)):
        image = raw_image
        scale = scales[i]
        raw_h = int(h * scale)
        raw_w = int(w * scale)
        image = cv2.resize(image, None, fx=scale, fy=scale, interpolation=cv2.INTER_LINEAR).astype(np.int32)
        nh,nw,_ = image.shape
        # pad image to a multiple of 32 at this scale, filling with the mean
        new_h = (raw_h + 31) // 32 * 32
        new_w = (raw_w + 31) // 32 * 32
        input_image = np.zeros((new_h, new_w, 3), dtype=np.uint8)
        input_image[:, :] = config.DATASET.MEAN
        # input_image[:raw_h, :raw_w, :] = image
        input_image[:nh, :nw, :] = image
        image, _ = transforms(input_image, None)
        image = image.unsqueeze(0).to(device)
        model = model.to(device)
        for flip in range(flip_range):
            if flip:
                image = flip_tensor(image, 3)
            out_dict = model(image)
            for key in out_dict.keys():  # return to raw_input shape
                out_dict[key] = out_dict[key][:, :, : raw_h, : raw_w]
            if raw_h != org_h_pad or raw_w != org_w_pad:
                out_dict = upsample_predictions(out_dict, (org_h_pad, org_w_pad), scale)
            # average softmax or logit?
            semantic_pred = out_dict['semantic']
            # semantic_pred = F.softmax(out_dict['semantic'],dim=1)
            center_pred = out_dict['center']
            offset_pred = out_dict['offset']
            if flip:
                # Un-flip the predictions before accumulating.
                semantic_pred = flip_tensor(semantic_pred,3)
                center_pred = flip_tensor(center_pred,3)
                offset_pred = flip_tensor(offset_pred,3)
                # Horizontal flip negates the x-displacement channel
                # (channel order is (offset_y, offset_x)).
                offset_pred[:, 1, :, :] *= (-1)
            sum_semantic_with_flip += semantic_pred
            sum_center_with_flip += center_pred
            sum_offset_with_flip += offset_pred
    semantic_mean = sum_semantic_with_flip / (flip_range * len(scales))
    center_mean = sum_center_with_flip / (flip_range * len(scales))
    offset_mean = sum_offset_with_flip / (flip_range * len(scales))
    # Reuse the last out_dict as the container for the averaged maps.
    out_dict['semantic'] = semantic_mean
    out_dict['center'] = center_mean
    out_dict['offset'] = offset_mean
    return out_dict
|
Cream/CDARTS/CDARTS_segmentation/segmentation/utils/test_utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/utils/test_utils.py",
"repo_id": "Cream",
"token_count": 2171
}
| 281 |
# encoding: utf-8
import os
import time
import numpy as np
import numba
import argparse
from collections import OrderedDict
import torch
import torch.distributed as dist
from engine.logger import get_logger
# Module-wide logger shared by the helpers below.
logger = get_logger()
# Small constant guarding against division by zero (IoU computation).
EPS = 1e-10
# Download URLs for ImageNet-pretrained ResNet weights.
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
class AverageMeter(object):
    """Tracks the most recent value and a running (count-weighted) average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the average."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        if self.count != 0:
            self.avg = self.sum / self.count
        else:
            self.avg = 0
def to_cuda(batch, device):
    """Recursively move every tensor in ``batch`` to GPU ``device``.

    Dicts and lists are traversed (and mutated) in place; tensors are moved
    with ``non_blocking=True``; all other values are returned unchanged.

    Args:
        batch: tensor, dict, list, or anything else (passed through).
        device: target CUDA device; ``None`` means the current device.

    Returns:
        ``batch`` with tensors relocated to the GPU.
    """
    # Exact ``type`` checks (not isinstance) are kept deliberately so tensor
    # subclasses and dict/list subclasses behave as before.
    if type(batch) == torch.Tensor:
        # Bug fix: ``device`` was previously ignored — every tensor went to
        # the current default device. ``Tensor.cuda(None)`` still targets the
        # current device, so existing callers are unaffected.
        batch = batch.cuda(device, non_blocking=True)
    elif type(batch) == dict:
        for key in batch.keys():
            batch[key] = to_cuda(batch[key], device)
    elif type(batch) == list:
        for i in range(len(batch)):
            batch[i] = to_cuda(batch[i], device)
    return batch
def get_loss_info_str(loss_meter_dict):
    """Format every meter as ``'Name: current (average)\\t'`` and join them.

    Each value in ``loss_meter_dict`` must expose ``.val`` and ``.avg``
    (e.g. ``AverageMeter``).
    """
    parts = []
    for name, meter in loss_meter_dict.items():
        parts.append('{name}: {meter.val:.3e} ({meter.avg:.3e})\t'.format(
            name=name, meter=meter))
    return ''.join(parts)
def reduce_tensor(tensor, dst=0, op=dist.ReduceOp.SUM, world_size=1):
    """Reduce ``tensor`` onto rank ``dst`` and average it there.

    The input tensor is cloned so callers keep their local value; only the
    destination rank divides by ``world_size``.
    """
    reduced = tensor.clone()
    dist.reduce(reduced, dst, op)
    if dist.get_rank() == dst:
        reduced.div_(world_size)
    return reduced
def all_reduce_tensor(tensor, op=dist.ReduceOp.SUM, world_size=1):
    """All-reduce a clone of ``tensor`` and average it on every rank."""
    reduced = tensor.clone()
    dist.all_reduce(reduced, op)
    reduced.div_(world_size)
    return reduced
def load_model(model, model_file, is_restore=False):
    """Load weights into ``model`` from a checkpoint path or a state dict.

    Args:
        model: target module.
        model_file: path to a ``torch.save`` checkpoint (optionally wrapped
            as ``{'model': state_dict}``), or an already-loaded state dict.
        is_restore: when True, prepend ``'module.'`` to every key (for
            loading a plain checkpoint into a DataParallel-wrapped model).

    Returns:
        ``model`` with matching weights loaded non-strictly; missing and
        unexpected keys are only logged as warnings.
    """
    t_start = time.time()
    if isinstance(model_file, str):
        state_dict = torch.load(model_file)
        if 'model' in state_dict.keys():
            state_dict = state_dict['model']
    else:
        state_dict = model_file
    t_ioend = time.time()
    if is_restore:
        new_state_dict = OrderedDict()
        for k, v in state_dict.items():
            name = 'module.' + k
            new_state_dict[name] = v
        state_dict = new_state_dict
    model.load_state_dict(state_dict, strict=False)
    # Report the key mismatch between checkpoint and model.
    ckpt_keys = set(state_dict.keys())
    own_keys = set(model.state_dict().keys())
    missing_keys = own_keys - ckpt_keys
    unexpected_keys = ckpt_keys - own_keys
    if len(missing_keys) > 0:
        logger.warning('Missing key(s) in state_dict: {}'.format(
            ', '.join('{}'.format(k) for k in missing_keys)))
    if len(unexpected_keys) > 0:
        logger.warning('Unexpected key(s) in state_dict: {}'.format(
            ', '.join('{}'.format(k) for k in unexpected_keys)))
    del state_dict
    t_end = time.time()
    logger.info(
        "Load model, Time usage:\n\tIO: {}, initialize parameters: {}".format(
            t_ioend - t_start, t_end - t_ioend))
    return model
def parse_devices(input_devices):
    """Parse a GPU spec string into a list of device ids.

    Supported forms: explicit ids (``'0,2'``), inclusive ranges (``'1-3'``)
    and a trailing ``'*'`` meaning all visible GPUs.
    """
    if input_devices.endswith('*'):
        return list(range(torch.cuda.device_count()))
    devices = []
    for token in input_devices.split(','):
        if '-' in token:
            first, last = token.split('-')[0], token.split('-')[1]
            assert first != ''
            assert last != ''
            first, last = int(first), int(last)
            assert first < last
            assert last < torch.cuda.device_count()
            for dev in range(first, last + 1):
                devices.append(dev)
        else:
            dev = int(token)
            assert dev < torch.cuda.device_count()
            devices.append(dev)
    logger.info('using devices {}'.format(
        ', '.join([str(d) for d in devices])))
    return devices
def extant_file(x):
    """argparse ``type=`` helper: require that path ``x`` exists.

    Returns the path unchanged when it exists; otherwise raises
    ``argparse.ArgumentTypeError`` so argparse prints a rejection like:
    ``error: argument input: x does not exist``.
    """
    if os.path.exists(x):
        return x
    raise argparse.ArgumentTypeError("{0} does not exist".format(x))
def link_file(src, target):
    """Create (or replace) a symlink at ``target`` pointing to ``src``.

    Fixes two defects in the original:
    - it checked ``os.path.isdir(target)`` and then called ``os.remove``,
      which cannot remove directories (always raised for real dirs);
    - it shelled out via ``os.system('ln -s ...')``, which silently broke
      on paths containing spaces or shell metacharacters.
    """
    # ``islink`` catches existing (possibly dangling) links so they are
    # replaced; ``isfile`` follows links, so check the link case first.
    if os.path.islink(target) or os.path.isfile(target):
        os.remove(target)
    elif os.path.isdir(target):
        # Refuse to delete a real directory tree; the original's
        # os.remove(dir) raised an OSError here as well.
        raise OSError('refusing to replace directory {} with a symlink'.format(target))
    # os.symlink handles arbitrary path characters, unlike the shell.
    os.symlink(src, target)
def ensure_dir(path):
    """Create ``path`` (including parents) unless it is already a directory."""
    if os.path.isdir(path):
        return
    os.makedirs(path)
def _dbg_interactive(var, value):
    """Drop into an embedded IPython shell for ad-hoc debugging.

    ``var`` and ``value`` are not used directly; they are only reachable
    from the embedded shell via the local frame.
    """
    from IPython import embed
    embed()
def check_keys(model, pretrained_state_dict):
    """Compare checkpoint keys against the model's state dict.

    Prints the missing and unused key sets, and asserts that at least one
    parameter can be loaded from the checkpoint.
    """
    ckpt_keys = set(pretrained_state_dict.keys())
    model_keys = set(model.state_dict().keys())
    shared_keys = model_keys & ckpt_keys
    print('missing keys:{}'.format(model_keys - ckpt_keys))
    print('unused checkpoint keys:{}'.format(ckpt_keys - model_keys))
    # print('used keys:{}'.format(shared_keys))
    assert len(shared_keys) > 0, 'load NONE from pretrained checkpoint'
    return True
def remove_prefix(state_dict, prefix):
    """Strip ``prefix`` from every key of ``state_dict`` that starts with it.

    Old-style checkpoints store all parameters under a shared ``'module.'``
    prefix (DataParallel); keys without the prefix are kept unchanged.
    """
    print('remove prefix \'{}\''.format(prefix))
    stripped = {}
    for key, value in state_dict.items():
        if key.startswith(prefix):
            key = key.split(prefix, 1)[-1]
        stripped[key] = value
    return stripped
def load_pretrain(model, pretrained_path):
    """Load a pretrained checkpoint into ``model`` (non-strict), CUDA-mapped.

    Handles both raw state dicts and ``{'state_dict': ...}`` wrappers, and
    strips a leading ``'module.'`` (DataParallel) prefix from the keys.
    """
    print('load pretrained model from {}'.format(pretrained_path))
    device = torch.cuda.current_device()
    # Map storages directly onto the current CUDA device while loading.
    pretrained_dict = torch.load(pretrained_path, map_location=lambda storage, loc: storage.cuda(device))
    if "state_dict" in pretrained_dict.keys():
        pretrained_dict = remove_prefix(pretrained_dict['state_dict'], 'module.')
    else:
        pretrained_dict = remove_prefix(pretrained_dict, 'module.')
    check_keys(model, pretrained_dict)
    model.load_state_dict(pretrained_dict, strict=False)
    return model
def nanmean(x):
    """Arithmetic mean of ``x`` ignoring NaNs (a NaN never equals itself)."""
    finite_mask = x == x
    return torch.mean(x[finite_mask])
# computes confusion matrix
def _fast_hist(true, pred, num_classes):
    """Confusion matrix (num_classes x num_classes) as a float tensor.

    Rows index ground-truth labels, columns predictions; ground-truth
    labels outside [0, num_classes) are dropped.
    """
    valid = (true >= 0) & (true < num_classes)
    flat = num_classes * true[valid] + pred[valid]
    counts = torch.bincount(flat, minlength=num_classes ** 2)
    return counts.reshape(num_classes, num_classes).float()
def compute_hist(pred, lb, n_classes, ignore_label):
    """Confusion matrix between predictions ``pred`` and labels ``lb``.

    Pixels whose label equals ``ignore_label`` are dropped. Rows index
    predicted classes, columns ground-truth classes; returned as float.
    """
    valid = lb != ignore_label
    flat = pred[valid] * n_classes + lb[valid]
    counts = torch.bincount(flat, minlength=n_classes ** 2)
    return counts.reshape((n_classes, n_classes)).float()
# NumPy twin of ``compute_hist`` above, numba-jitted for speed; note it
# returns integer counts (not float).
@numba.jit
def compute_hist_np(pred, lb, n_classes, ignore_label):
    """Confusion matrix for NumPy inputs, ignoring ``ignore_label`` pixels."""
    n_classes = n_classes
    keep = np.logical_not(lb == ignore_label)
    merge = pred[keep] * n_classes + lb[keep]
    hist = np.bincount(merge, minlength=n_classes ** 2)
    hist = hist.reshape((n_classes, n_classes))
    return hist
# computes IoU based on confusion matrix
def jaccard_index(hist):
    """Computes the Jaccard index, a.k.a the Intersection over Union (IoU).

    Args:
        hist: square confusion matrix.
    Returns:
        (avg_jacc, jaccard): mean per-class IoU ignoring NaNs, and the
        per-class IoU vector.
    """
    intersection = torch.diag(hist)
    row_totals = hist.sum(dim=1)
    col_totals = hist.sum(dim=0)
    # union = A + B - (A ∩ B); EPS guards against empty classes.
    jaccard = intersection / (row_totals + col_totals - intersection + EPS)
    return nanmean(jaccard), jaccard
def check_keys(model, pretrained_state_dict):
    """Verify that a checkpoint shares at least one key with ``model``.

    NOTE(review): exact duplicate of ``check_keys`` defined earlier in this
    module; this later definition is the one in effect at runtime.
    """
    ckpt_keys = set(pretrained_state_dict.keys())
    model_keys = set(model.state_dict().keys())
    overlap = model_keys.intersection(ckpt_keys)
    missing = model_keys.difference(ckpt_keys)
    unused = ckpt_keys.difference(model_keys)
    print('missing keys:{}'.format(missing))
    print('unused checkpoint keys:{}'.format(unused))
    # print('used keys:{}'.format(overlap))
    assert len(overlap) > 0, 'load NONE from pretrained checkpoint'
    return True
def remove_prefix(state_dict, prefix):
    """Return ``state_dict`` with ``prefix`` stripped from matching keys.

    NOTE(review): exact duplicate of ``remove_prefix`` defined earlier in
    this module; this later definition is the one in effect at runtime.
    """
    print('remove prefix \'{}\''.format(prefix))

    def strip(key):
        return key.split(prefix, 1)[-1] if key.startswith(prefix) else key

    return {strip(k): v for k, v in state_dict.items()}
def load_pretrain(model, pretrained_path):
    """Load a pretrained checkpoint into ``model`` (non-strict), CUDA-mapped.

    NOTE(review): near-duplicate of ``load_pretrain`` defined earlier in this
    module (this one additionally carries a commented-out head-filtering
    variant); this later definition is the one in effect at runtime.
    """
    print('load pretrained model from {}'.format(pretrained_path))
    device = torch.cuda.current_device()
    pretrained_dict = torch.load(pretrained_path, map_location=lambda storage, loc: storage.cuda(device))
    if "state_dict" in pretrained_dict.keys():
        pretrained_dict = remove_prefix(pretrained_dict['state_dict'], 'module.')
        # new_dict = {}
        # for k in pretrained_dict.keys():
        #     if "heads" in k:
        #         continue
        #     else:
        #         new_dict[k] = pretrained_dict[k]
        # pretrained_dict = new_dict
    else:
        pretrained_dict = remove_prefix(pretrained_dict, 'module.')
    check_keys(model, pretrained_dict)
    model.load_state_dict(pretrained_dict, strict=False)
    return model
|
Cream/CDARTS/CDARTS_segmentation/tools/utils/pyt_utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/tools/utils/pyt_utils.py",
"repo_id": "Cream",
"token_count": 3973
}
| 282 |
# Panoptic-DeepLab on Cityscapes: "R-52" DeepLab-stem backbone, output
# stride 16, multi-grid res5, 90k-iteration schedule, depthwise-separable
# convolutions in both heads.
_BASE_: Base-PanopticDeepLab-OS16.yaml
MODEL:
  WEIGHTS: "detectron2://DeepLab/R-52.pkl"
  # Mean/std on the 0-255 RGB scale (see INPUT.FORMAT below).
  PIXEL_MEAN: [123.675, 116.280, 103.530]
  PIXEL_STD: [58.395, 57.120, 57.375]
  BACKBONE:
    NAME: "build_resnet_deeplab_backbone"
  RESNETS:
    DEPTH: 50
    NORM: "SyncBN"
    # Dilation multipliers applied across the res5 blocks.
    RES5_MULTI_GRID: [1, 2, 4]
    STEM_TYPE: "deeplab"
    STEM_OUT_CHANNELS: 128
    STRIDE_IN_1X1: False
  PANOPTIC_DEEPLAB:
    USE_DEPTHWISE_SEPARABLE_CONV: True
  SEM_SEG_HEAD:
    USE_DEPTHWISE_SEPARABLE_CONV: True
SOLVER:
  MAX_ITER: 90000
INPUT:
  FORMAT: "RGB"
  CROP:
    SIZE: (512, 1024)
Cream/CDARTS/CDARTS_segmentation/train/configs/Cityscapes-PanopticSegmentation/panoptic_deeplab_R_52_os16_mg124_poly_90k_bs32_crop_512_1024_dsconv.yaml/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/train/configs/Cityscapes-PanopticSegmentation/panoptic_deeplab_R_52_os16_mg124_poly_90k_bs32_crop_512_1024_dsconv.yaml",
"repo_id": "Cream",
"token_count": 303
}
| 283 |
""" Search cell """
import json
import lib.utils.genotypes as gt
from torchscope import scope
from lib.models.model_test import ModelTest
# config
# ImageNet flavor of the searched network; the CIFAR alternative is kept
# commented out below for quick switching.
stem_multiplier = 1
n_classes = 1000
init_channels = 48
model_type = 'imagenet'
cell_file = './genotypes.json'
#stem_multiplier = 3
#n_classes = 10
#init_channels = 36
#model_type = 'cifar'
#cell_file = './genotypes.json'
def main():
    """Build the searched model from ``cell_file`` genotypes and print its
    FLOPs / parameter counts via ``torchscope.scope``.

    Raises:
        Exception: when ``model_type`` names an unsupported dataset.
    """
    # ``with`` guarantees the genotype file is closed even if JSON parsing
    # fails (the original left the handle open on error).
    with open(cell_file, 'r') as f:
        r_dict = json.loads(f.read())
    # Layer index -> genotype, with keys converted back to ints.
    genotypes_dict = {int(layer_idx): gt.from_str(genotype)
                      for layer_idx, genotype in r_dict.items()}
    model_main = ModelTest(genotypes_dict, model_type, res_stem=False,
                           init_channel=init_channels,
                           stem_multiplier=stem_multiplier, n_nodes=4,
                           num_classes=n_classes)
    if 'cifar' in model_type:
        input_x = (3, 32, 32)
    elif 'imagenet' in model_type:
        input_x = (3, 224, 224)
    else:
        raise Exception("Not support dataset!")
    scope(model_main, input_size=input_x)
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
Cream/CDARTS/lib/utils/count_flops.py/0
|
{
"file_path": "Cream/CDARTS/lib/utils/count_flops.py",
"repo_id": "Cream",
"token_count": 461
}
| 284 |
from lib.models.blocks.residual_block import get_Bottleneck, get_BasicBlock
from lib.models.blocks.inverted_residual_block import InvertedResidual
|
Cream/Cream/lib/models/blocks/__init__.py/0
|
{
"file_path": "Cream/Cream/lib/models/blocks/__init__.py",
"repo_id": "Cream",
"token_count": 44
}
| 285 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# Written by Hao Du and Houwen Peng
# email: [email protected] and [email protected]
import os
import warnings
import datetime
import torch
import numpy as np
import torch.nn as nn
import _init_paths
from torchscope import scope
from torch.utils.tensorboard import SummaryWriter
# import timm packages
from timm.optim import create_optimizer
from timm.models import resume_checkpoint
from timm.scheduler import create_scheduler
from timm.data import Dataset, create_loader
from timm.utils import ModelEma, update_summary
from timm.loss import LabelSmoothingCrossEntropy
# import apex as distributed package otherwise we use torch.nn.parallel.distributed as distributed package
try:
from apex import amp
from apex.parallel import DistributedDataParallel as DDP
from apex.parallel import convert_syncbn_model
HAS_APEX = True
except ImportError:
from torch.nn.parallel import DistributedDataParallel as DDP
HAS_APEX = False
# import models and training functions
from lib.core.test import validate
from lib.core.retrain import train_epoch
from lib.models.structures.childnet import gen_childnet
from lib.utils.util import parse_config_args, get_logger, get_model_flops_params
from lib.config import DEFAULT_CROP_PCT, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
def main():
    """Entry point: build the selected Cream child network, train and
    validate it on ImageNet under (optionally distributed) training.

    Workflow: parse config -> pick a pre-searched architecture via
    ``cfg.NET.SELECTION`` -> generate the child net -> set up distributed
    training, data loaders, losses and LR schedule -> train/validate for
    ``num_epochs`` epochs, tracking the best ``cfg.EVAL_METRICS`` value.

    NOTE(review): ``saver`` is initialized to None and never assigned, so
    the checkpoint-saving branch below never runs and ``best_metric`` stays
    None — confirm whether checkpointing was intentionally disabled.
    """
    args, cfg = parse_config_args('child net training')
    # resolve logging
    output_dir = os.path.join(cfg.SAVE_PATH,
                              "{}-{}".format(datetime.date.today().strftime('%m%d'),
                                             cfg.MODEL))
    # Only rank 0 writes logs and TensorBoard summaries.
    if args.local_rank == 0:
        logger = get_logger(os.path.join(output_dir, 'retrain.log'))
        writer = SummaryWriter(os.path.join(output_dir, 'runs'))
    else:
        writer, logger = None, None
    # retrain model selection: each selection id maps to a searched
    # block-repeat layout and its training image size
    if cfg.NET.SELECTION == 481:
        arch_list = [
            [0], [
                3, 4, 3, 1], [
                3, 2, 3, 0], [
                3, 3, 3, 1, 1], [
                3, 3, 3, 3], [
                3, 3, 3, 3], [0]]
        cfg.DATASET.IMAGE_SIZE = 224
    elif cfg.NET.SELECTION == 43:
        arch_list = [[0], [3], [3, 1], [3, 1], [3, 3, 3], [3, 3], [0]]
        cfg.DATASET.IMAGE_SIZE = 96
    elif cfg.NET.SELECTION == 14:
        arch_list = [[0], [3], [3, 3], [3, 3], [3], [3], [0]]
        cfg.DATASET.IMAGE_SIZE = 64
    elif cfg.NET.SELECTION == 114:
        arch_list = [[0], [3], [3, 3], [3, 3], [3, 3, 3], [3, 3], [0]]
        cfg.DATASET.IMAGE_SIZE = 160
    elif cfg.NET.SELECTION == 287:
        arch_list = [[0], [3], [3, 3], [3, 1, 3], [3, 3, 3, 3], [3, 3, 3], [0]]
        cfg.DATASET.IMAGE_SIZE = 224
    elif cfg.NET.SELECTION == 604:
        arch_list = [
            [0], [
                3, 3, 2, 3, 3], [
                3, 2, 3, 2, 3], [
                3, 2, 3, 2, 3], [
                3, 3, 2, 2, 3, 3], [
                3, 3, 2, 3, 3, 3], [0]]
        cfg.DATASET.IMAGE_SIZE = 224
    else:
        raise ValueError("Model Retrain Selection is not Supported!")
    # define childnet architecture from arch_list
    stem = ['ds_r1_k3_s1_e1_c16_se0.25', 'cn_r1_k1_s1_c320_se0.25']
    choice_block_pool = ['ir_r1_k3_s2_e4_c24_se0.25',
                         'ir_r1_k5_s2_e4_c40_se0.25',
                         'ir_r1_k3_s2_e6_c80_se0.25',
                         'ir_r1_k3_s1_e6_c96_se0.25',
                         'ir_r1_k5_s2_e6_c192_se0.25']
    # Repeat each pool block once per entry in the matching arch_list stage.
    arch_def = [[stem[0]]] + [[choice_block_pool[idx]
                               for repeat_times in range(len(arch_list[idx + 1]))]
                              for idx in range(len(choice_block_pool))] + [[stem[1]]]
    # generate childnet
    model = gen_childnet(
        arch_list,
        arch_def,
        num_classes=cfg.DATASET.NUM_CLASSES,
        drop_rate=cfg.NET.DROPOUT_RATE,
        global_pool=cfg.NET.GP)
    # initialize training parameters
    eval_metric = cfg.EVAL_METRICS
    best_metric, best_epoch, saver = None, None, None
    # initialize distributed parameters
    distributed = cfg.NUM_GPU > 1
    torch.cuda.set_device(args.local_rank)
    torch.distributed.init_process_group(backend='nccl', init_method='env://')
    if args.local_rank == 0:
        logger.info(
            'Training on Process {} with {} GPUs.'.format(
                args.local_rank, cfg.NUM_GPU))
    # fix random seeds
    torch.manual_seed(cfg.SEED)
    torch.cuda.manual_seed_all(cfg.SEED)
    np.random.seed(cfg.SEED)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    # get parameters and FLOPs of model
    if args.local_rank == 0:
        macs, params = get_model_flops_params(model, input_size=(
            1, 3, cfg.DATASET.IMAGE_SIZE, cfg.DATASET.IMAGE_SIZE))
        logger.info(
            '[Model-{}] Flops: {} Params: {}'.format(cfg.NET.SELECTION, macs, params))
    # create optimizer
    model = model.cuda()
    optimizer = create_optimizer(cfg, model)
    # optionally resume from a checkpoint
    resume_state, resume_epoch = {}, None
    if cfg.AUTO_RESUME:
        resume_state, resume_epoch = resume_checkpoint(model, cfg.RESUME_PATH)
        optimizer.load_state_dict(resume_state['optimizer'])
        del resume_state
    model_ema = None
    if cfg.NET.EMA.USE:
        # Exponential moving average of the weights, evaluated separately.
        model_ema = ModelEma(
            model,
            decay=cfg.NET.EMA.DECAY,
            device='cpu' if cfg.NET.EMA.FORCE_CPU else '',
            resume=cfg.RESUME_PATH if cfg.AUTO_RESUME else None)
    if distributed:
        if cfg.BATCHNORM.SYNC_BN:
            try:
                if HAS_APEX:
                    model = convert_syncbn_model(model)
                else:
                    model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(
                        model)
                if args.local_rank == 0:
                    logger.info(
                        'Converted model to use Synchronized BatchNorm.')
            except Exception as e:
                if args.local_rank == 0:
                    logger.error(
                        'Failed to enable Synchronized BatchNorm. Install Apex or Torch >= 1.1 with exception {}'.format(e))
        if HAS_APEX:
            model = DDP(model, delay_allreduce=True)
        else:
            if args.local_rank == 0:
                logger.info(
                    "Using torch DistributedDataParallel. Install NVIDIA Apex for Apex DDP.")
            # can use device str in Torch >= 1.1
            model = DDP(model, device_ids=[args.local_rank])
    # imagenet train dataset
    train_dir = os.path.join(cfg.DATA_DIR, 'train')
    if not os.path.exists(train_dir) and args.local_rank == 0:
        logger.error('Training folder does not exist at: {}'.format(train_dir))
        exit(1)
    dataset_train = Dataset(train_dir)
    loader_train = create_loader(
        dataset_train,
        input_size=(3, cfg.DATASET.IMAGE_SIZE, cfg.DATASET.IMAGE_SIZE),
        batch_size=cfg.DATASET.BATCH_SIZE,
        is_training=True,
        color_jitter=cfg.AUGMENTATION.COLOR_JITTER,
        auto_augment=cfg.AUGMENTATION.AA,
        num_aug_splits=0,
        crop_pct=DEFAULT_CROP_PCT,
        mean=IMAGENET_DEFAULT_MEAN,
        std=IMAGENET_DEFAULT_STD,
        num_workers=cfg.WORKERS,
        distributed=distributed,
        collate_fn=None,
        pin_memory=cfg.DATASET.PIN_MEM,
        interpolation='random',
        re_mode=cfg.AUGMENTATION.RE_MODE,
        re_prob=cfg.AUGMENTATION.RE_PROB
    )
    # imagenet validation dataset
    eval_dir = os.path.join(cfg.DATA_DIR, 'val')
    if not os.path.exists(eval_dir) and args.local_rank == 0:
        logger.error(
            'Validation folder does not exist at: {}'.format(eval_dir))
        exit(1)
    dataset_eval = Dataset(eval_dir)
    loader_eval = create_loader(
        dataset_eval,
        input_size=(3, cfg.DATASET.IMAGE_SIZE, cfg.DATASET.IMAGE_SIZE),
        batch_size=cfg.DATASET.VAL_BATCH_MUL * cfg.DATASET.BATCH_SIZE,
        is_training=False,
        interpolation='bicubic',
        crop_pct=DEFAULT_CROP_PCT,
        mean=IMAGENET_DEFAULT_MEAN,
        std=IMAGENET_DEFAULT_STD,
        num_workers=cfg.WORKERS,
        distributed=distributed,
        pin_memory=cfg.DATASET.PIN_MEM
    )
    # whether to use label smoothing
    if cfg.AUGMENTATION.SMOOTHING > 0.:
        train_loss_fn = LabelSmoothingCrossEntropy(
            smoothing=cfg.AUGMENTATION.SMOOTHING).cuda()
        validate_loss_fn = nn.CrossEntropyLoss().cuda()
    else:
        train_loss_fn = nn.CrossEntropyLoss().cuda()
        validate_loss_fn = train_loss_fn
    # create learning rate scheduler
    lr_scheduler, num_epochs = create_scheduler(cfg, optimizer)
    start_epoch = resume_epoch if resume_epoch is not None else 0
    if start_epoch > 0:
        lr_scheduler.step(start_epoch)
    if args.local_rank == 0:
        logger.info('Scheduled epochs: {}'.format(num_epochs))
    try:
        best_record, best_ep = 0, 0
        for epoch in range(start_epoch, num_epochs):
            if distributed:
                loader_train.sampler.set_epoch(epoch)
            train_metrics = train_epoch(
                epoch,
                model,
                loader_train,
                optimizer,
                train_loss_fn,
                cfg,
                lr_scheduler=lr_scheduler,
                saver=saver,
                output_dir=output_dir,
                model_ema=model_ema,
                logger=logger,
                writer=writer,
                local_rank=args.local_rank)
            eval_metrics = validate(
                epoch,
                model,
                loader_eval,
                validate_loss_fn,
                cfg,
                logger=logger,
                writer=writer,
                local_rank=args.local_rank)
            # When an EMA copy exists on GPU, report its metrics instead.
            if model_ema is not None and not cfg.NET.EMA.FORCE_CPU:
                ema_eval_metrics = validate(
                    epoch,
                    model_ema.ema,
                    loader_eval,
                    validate_loss_fn,
                    cfg,
                    log_suffix='_EMA',
                    logger=logger,
                    writer=writer,
                    local_rank=args.local_rank)
                eval_metrics = ema_eval_metrics
            if lr_scheduler is not None:
                lr_scheduler.step(epoch + 1, eval_metrics[eval_metric])
            update_summary(epoch, train_metrics, eval_metrics, os.path.join(
                output_dir, 'summary.csv'), write_header=best_metric is None)
            if saver is not None:
                # save proper checkpoint with eval metric
                save_metric = eval_metrics[eval_metric]
                best_metric, best_epoch = saver.save_checkpoint(
                    model, optimizer, cfg,
                    epoch=epoch, model_ema=model_ema, metric=save_metric)
            if best_record < eval_metrics[eval_metric]:
                best_record = eval_metrics[eval_metric]
                best_ep = epoch
            if args.local_rank == 0:
                logger.info(
                    '*** Best metric: {0} (epoch {1})'.format(best_record, best_ep))
    except KeyboardInterrupt:
        pass
    if best_metric is not None:
        logger.info(
            '*** Best metric: {0} (epoch {1})'.format(best_metric, best_epoch))
# Script entry point; guarded so the module can be imported safely.
if __name__ == '__main__':
    main()
|
Cream/Cream/tools/retrain.py/0
|
{
"file_path": "Cream/Cream/tools/retrain.py",
"repo_id": "Cream",
"token_count": 5896
}
| 286 |
# --------------------------------------------------------
# Efficient Main (train/validate)
# Copyright (c) 2022 Microsoft
# Adapted from LeViT and Swin Transformer
# LeViT: (https://github.com/facebookresearch/levit)
# Swin: (https://github.com/microsoft/swin-transformer)
# --------------------------------------------------------
import argparse
import datetime
import numpy as np
import time
import torch
import torch.backends.cudnn as cudnn
import json
import os
from pathlib import Path
from timm.data import Mixup
from timm.models import create_model
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
from timm.scheduler import create_scheduler
from timm.optim import create_optimizer
from timm.utils import NativeScaler, get_state_dict, ModelEma
from data.samplers import RASampler
from data.datasets import build_dataset
from data.threeaugment import new_data_aug_generator
from engine import train_one_epoch, evaluate
from losses import DistillationLoss
from model import build
import utils
def get_args_parser():
    """Build the argparse parser for EfficientViT training and evaluation.

    Returns a parser with ``add_help=False`` (intended to be composed into a
    parent parser). Argument groups: model, optimizer, LR schedule,
    augmentation (incl. random erasing and mixup/cutmix), distillation,
    finetuning, dataset, and distributed-training parameters.
    """
    parser = argparse.ArgumentParser(
        'EfficientViT training and evaluation script', add_help=False)
    parser.add_argument('--batch-size', default=256, type=int)
    parser.add_argument('--epochs', default=300, type=int)
    # Model parameters
    parser.add_argument('--model', default='EfficientViT_M4', type=str, metavar='MODEL',
                        help='Name of model to train')
    parser.add_argument('--input-size', default=224,
                        type=int, help='images input size')
    parser.add_argument('--model-ema', action='store_true')
    parser.add_argument(
        '--no-model-ema', action='store_false', dest='model_ema')
    parser.set_defaults(model_ema=True)
    parser.add_argument('--model-ema-decay', type=float,
                        default=0.99996, help='')
    parser.add_argument('--model-ema-force-cpu',
                        action='store_true', default=False, help='')
    # Optimizer parameters
    parser.add_argument('--opt', default='adamw', type=str, metavar='OPTIMIZER',
                        help='Optimizer (default: "adamw"')
    parser.add_argument('--opt-eps', default=1e-8, type=float, metavar='EPSILON',
                        help='Optimizer Epsilon (default: 1e-8)')
    parser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',
                        help='Optimizer Betas (default: None, use opt default)')
    parser.add_argument('--clip-grad', type=float, default=0.02, metavar='NORM',
                        help='Clip gradient norm (default: None, no clipping)')
    parser.add_argument('--clip-mode', type=str, default='agc',
                        help='Gradient clipping mode. One of ("norm", "value", "agc")')
    parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
                        help='SGD momentum (default: 0.9)')
    parser.add_argument('--weight-decay', type=float, default=0.025,
                        help='weight decay (default: 0.025)')
    # Learning rate schedule parameters
    parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER',
                        help='LR scheduler (default: "cosine"')
    parser.add_argument('--lr', type=float, default=1e-3, metavar='LR',
                        help='learning rate (default: 1e-3)')
    parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',
                        help='learning rate noise on/off epoch percentages')
    parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',
                        help='learning rate noise limit percent (default: 0.67)')
    parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',
                        help='learning rate noise std-dev (default: 1.0)')
    parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR',
                        help='warmup learning rate (default: 1e-6)')
    parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
                        help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')
    parser.add_argument('--decay-epochs', type=float, default=30, metavar='N',
                        help='epoch interval to decay LR')
    parser.add_argument('--warmup-epochs', type=int, default=5, metavar='N',
                        help='epochs to warmup LR, if scheduler supports')
    parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N',
                        help='epochs to cooldown LR at min_lr, after cyclic schedule ends')
    parser.add_argument('--patience-epochs', type=int, default=10, metavar='N',
                        help='patience epochs for Plateau LR scheduler (default: 10')
    parser.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',
                        help='LR decay rate (default: 0.1)')
    # Augmentation parameters
    parser.add_argument('--ThreeAugment', action='store_true')
    parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT',
                        help='Color jitter factor (default: 0.4)')
    # NOTE(review): the help string below contains a stray '" + "' artifact
    # from a broken string concatenation; harmless at runtime (help text
    # only), but worth cleaning up.
    parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME',
                        help='Use AutoAugment policy. "v0" or "original". " + \
                             "(default: rand-m9-mstd0.5-inc1)'),
    parser.add_argument('--smoothing', type=float, default=0.1,
                        help='Label smoothing (default: 0.1)')
    parser.add_argument('--train-interpolation', type=str, default='bicubic',
                        help='Training interpolation (random, bilinear, bicubic default: "bicubic")')
    parser.add_argument('--repeated-aug', action='store_true')
    parser.add_argument('--no-repeated-aug',
                        action='store_false', dest='repeated_aug')
    parser.set_defaults(repeated_aug=True)
    # Random Erase params
    parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT',
                        help='Random erase prob (default: 0.25)')
    parser.add_argument('--remode', type=str, default='pixel',
                        help='Random erase mode (default: "pixel")')
    parser.add_argument('--recount', type=int, default=1,
                        help='Random erase count (default: 1)')
    parser.add_argument('--resplit', action='store_true', default=False,
                        help='Do not random erase first (clean) augmentation split')
    # Mixup params
    parser.add_argument('--mixup', type=float, default=0.8,
                        help='mixup alpha, mixup enabled if > 0. (default: 0.8)')
    parser.add_argument('--cutmix', type=float, default=1.0,
                        help='cutmix alpha, cutmix enabled if > 0. (default: 1.0)')
    parser.add_argument('--cutmix-minmax', type=float, nargs='+', default=None,
                        help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)')
    parser.add_argument('--mixup-prob', type=float, default=1.0,
                        help='Probability of performing mixup or cutmix when either/both is enabled')
    parser.add_argument('--mixup-switch-prob', type=float, default=0.5,
                        help='Probability of switching to cutmix when both mixup and cutmix enabled')
    parser.add_argument('--mixup-mode', type=str, default='batch',
                        help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"')
    # Distillation parameters
    parser.add_argument('--teacher-model', default='regnety_160', type=str, metavar='MODEL',
                        help='Name of teacher model to train (default: "regnety_160"')
    parser.add_argument('--teacher-path', type=str,
                        default='https://dl.fbaipublicfiles.com/deit/regnety_160-a5fe301d.pth')
    parser.add_argument('--distillation-type', default='none',
                        choices=['none', 'soft', 'hard'], type=str, help="")
    parser.add_argument('--distillation-alpha',
                        default=0.5, type=float, help="")
    parser.add_argument('--distillation-tau', default=1.0, type=float, help="")
    # Finetuning params
    parser.add_argument('--finetune', default='',
                        help='finetune from checkpoint')
    parser.add_argument('--set_bn_eval', action='store_true', default=False,
                        help='set BN layers to eval mode during finetuning.')
    # Dataset parameters
    parser.add_argument('--data-path', default='/root/FastBaseline/data/imagenet', type=str,
                        help='dataset path')
    parser.add_argument('--data-set', default='IMNET', choices=['CIFAR', 'IMNET', 'INAT', 'INAT19'],
                        type=str, help='Image Net dataset path')
    parser.add_argument('--inat-category', default='name',
                        choices=['kingdom', 'phylum', 'class', 'order',
                                 'supercategory', 'family', 'genus', 'name'],
                        type=str, help='semantic granularity')
    parser.add_argument('--output_dir', default='',
                        help='path where to save, empty for no saving')
    parser.add_argument('--device', default='cuda',
                        help='device to use for training / testing')
    parser.add_argument('--seed', default=0, type=int)
    parser.add_argument('--resume', default='', help='resume from checkpoint')
    parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
                        help='start epoch')
    parser.add_argument('--eval', action='store_true',
                        help='Perform evaluation only')
    parser.add_argument('--dist-eval', action='store_true',
                        default=False, help='Enabling distributed evaluation')
    parser.add_argument('--num_workers', default=10, type=int)
    parser.add_argument('--pin-mem', action='store_true',
                        help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
    parser.add_argument('--no-pin-mem', action='store_false', dest='pin_mem',
                        help='')
    parser.set_defaults(pin_mem=True)
    # training parameters
    parser.add_argument('--world_size', default=1, type=int,
                        help='number of distributed processes')
    parser.add_argument('--dist_url', default='env://',
                        help='url used to set up distributed training')
    parser.add_argument('--save_freq', default=1, type=int,
                        help='frequency of model saving')
    return parser
def main(args):
    """Train or evaluate an EfficientViT image classifier.

    Builds the train/val datasets and (distributed-style) samplers, creates
    the model — optionally loading finetune weights, tracking an EMA copy,
    wrapping in DDP, and attaching a distillation teacher — then either runs
    a single evaluation pass (``--eval``) or trains for ``args.epochs``
    epochs with per-epoch evaluation, checkpointing and JSON logging.
    """
    utils.init_distributed_mode(args)
    if args.distillation_type != 'none' and args.finetune and not args.eval:
        raise NotImplementedError(
            "Finetuning with distillation not yet supported")
    device = torch.device(args.device)
    # fix the seed for reproducibility (offset by rank so each process differs)
    seed = args.seed + utils.get_rank()
    torch.manual_seed(seed)
    np.random.seed(seed)
    # random.seed(seed)
    cudnn.benchmark = True
    dataset_train, args.nb_classes = build_dataset(is_train=True, args=args)
    dataset_val, _ = build_dataset(is_train=False, args=args)
    if True:  # args.distributed: -- distributed-style samplers are used unconditionally
        num_tasks = utils.get_world_size()
        global_rank = utils.get_rank()
        if args.repeated_aug:
            # Repeated-augmentation sampler: several augmented copies of an
            # image can land in the same epoch.
            sampler_train = RASampler(
                dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
            )
        else:
            sampler_train = torch.utils.data.DistributedSampler(
                dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
            )
        if args.dist_eval:
            if len(dataset_val) % num_tasks != 0:
                print('Warning: Enabling distributed evaluation with an eval dataset not divisible by process number. '
                      'This will slightly alter validation results as extra duplicate entries are added to achieve '
                      'equal num of samples per-process.')
            sampler_val = torch.utils.data.DistributedSampler(
                dataset_val, num_replicas=num_tasks, rank=global_rank, shuffle=False)
        else:
            sampler_val = torch.utils.data.SequentialSampler(dataset_val)
    else:
        # Unreachable while the guard above is `if True:`; kept for the
        # single-process fallback it documents.
        sampler_train = torch.utils.data.RandomSampler(dataset_train)
        sampler_val = torch.utils.data.SequentialSampler(dataset_val)
    data_loader_train = torch.utils.data.DataLoader(
        dataset_train, sampler=sampler_train,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        pin_memory=args.pin_mem,
        drop_last=True,
    )
    if args.ThreeAugment:
        # Replace the training transform with the "3-augment" recipe.
        data_loader_train.dataset.transform = new_data_aug_generator(args)
    data_loader_val = torch.utils.data.DataLoader(
        dataset_val, sampler=sampler_val,
        batch_size=int(1.5 * args.batch_size),  # larger batch for gradient-free eval
        num_workers=args.num_workers,
        pin_memory=args.pin_mem,
        drop_last=False
    )
    mixup_fn = None
    mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None
    if mixup_active:
        mixup_fn = Mixup(
            mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax,
            prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode,
            label_smoothing=args.smoothing, num_classes=args.nb_classes)
    print(f"Creating model: {args.model}")
    model = create_model(
        args.model,
        num_classes=args.nb_classes,
        distillation=(args.distillation_type != 'none'),
        pretrained=False,
        fuse=False,
    )
    if args.finetune:
        if args.finetune.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.finetune, map_location='cpu', check_hash=True)
        else:
            checkpoint = utils.load_model(args.finetune, model)
        checkpoint_model = checkpoint['model']
        state_dict = model.state_dict()
        # Drop classifier weights whose shape no longer matches the current
        # model (e.g. a different number of classes).
        for k in ['head.l.weight', 'head.l.bias',
                  'head_dist.l.weight', 'head_dist.l.bias']:
            if k in checkpoint_model and checkpoint_model[k].shape != state_dict[k].shape:
                print(f"Removing key {k} from pretrained checkpoint")
                del checkpoint_model[k]
        msg = model.load_state_dict(checkpoint_model, strict=False)
        print(msg)
    model.to(device)
    model_ema = None
    if args.model_ema:
        # Important to create EMA model after cuda(), DP wrapper, and AMP but
        # before SyncBN and DDP wrapper
        model_ema = ModelEma(
            model,
            decay=args.model_ema_decay,
            device='cpu' if args.model_ema_force_cpu else '',
            resume='')
    model_without_ddp = model
    if args.distributed:
        model = torch.nn.parallel.DistributedDataParallel(
            model, device_ids=[args.gpu])
        # Keep a handle to the unwrapped model for checkpointing/optimizer.
        model_without_ddp = model.module
    n_parameters = sum(p.numel()
                       for p in model.parameters() if p.requires_grad)
    print('number of params:', n_parameters)
    # Linear LR scaling rule: lr scales with the global batch size over 512.
    linear_scaled_lr = args.lr * args.batch_size * utils.get_world_size() / 512.0
    args.lr = linear_scaled_lr
    optimizer = create_optimizer(args, model_without_ddp)
    loss_scaler = NativeScaler()
    lr_scheduler, _ = create_scheduler(args, optimizer)
    criterion = LabelSmoothingCrossEntropy()
    if args.mixup > 0.:
        # smoothing is handled with mixup label transform
        criterion = SoftTargetCrossEntropy()
    elif args.smoothing:
        criterion = LabelSmoothingCrossEntropy(smoothing=args.smoothing)
    else:
        criterion = torch.nn.CrossEntropyLoss()
    teacher_model = None
    if args.distillation_type != 'none':
        assert args.teacher_path, 'need to specify teacher-path when using distillation'
        print(f"Creating teacher model: {args.teacher_model}")
        teacher_model = create_model(
            args.teacher_model,
            pretrained=False,
            num_classes=args.nb_classes,
            global_pool='avg',
        )
        if args.teacher_path.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.teacher_path, map_location='cpu', check_hash=True)
        else:
            checkpoint = torch.load(args.teacher_path, map_location='cpu')
        teacher_model.load_state_dict(checkpoint['model'])
        teacher_model.to(device)
        teacher_model.eval()
    # wrap the criterion in our custom DistillationLoss, which
    # just dispatches to the original criterion if args.distillation_type is
    # 'none'
    criterion = DistillationLoss(
        criterion, teacher_model, args.distillation_type, args.distillation_alpha, args.distillation_tau
    )
    output_dir = Path(args.output_dir)
    # Record the model definition and run arguments once, from rank 0 only.
    if args.output_dir and utils.is_main_process():
        with (output_dir / "model.txt").open("a") as f:
            f.write(str(model))
    if args.output_dir and utils.is_main_process():
        with (output_dir / "args.txt").open("a") as f:
            f.write(json.dumps(args.__dict__, indent=2) + "\n")
    if args.resume:
        if args.resume.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.resume, map_location='cpu', check_hash=True)
        else:
            print("Loading local checkpoint at {}".format(args.resume))
            checkpoint = torch.load(args.resume, map_location='cpu')
        msg = model_without_ddp.load_state_dict(checkpoint['model'])
        print(msg)
        # Restore optimizer/scheduler/EMA/scaler state only when continuing training.
        if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint:
            optimizer.load_state_dict(checkpoint['optimizer'])
            lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
            args.start_epoch = checkpoint['epoch'] + 1
            if args.model_ema:
                utils._load_checkpoint_for_ema(
                    model_ema, checkpoint['model_ema'])
            if 'scaler' in checkpoint:
                loss_scaler.load_state_dict(checkpoint['scaler'])
    if args.eval:
        # utils.replace_batchnorm(model) # Users may choose whether to merge Conv-BN layers during eval
        print(f"Evaluating model: {args.model}")
        test_stats = evaluate(data_loader_val, model, device)
        print(
            f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
        return
    print(f"Start training for {args.epochs} epochs")
    start_time = time.time()
    max_accuracy = 0.0
    # NOTE(review): max_accuracy_ema is initialized but never updated in this loop.
    max_accuracy_ema = 0.0
    for epoch in range(args.start_epoch, args.epochs):
        if args.distributed:
            # Re-seed the sampler so each epoch sees a different shard order.
            data_loader_train.sampler.set_epoch(epoch)
        train_stats = train_one_epoch(
            model, criterion, data_loader_train,
            optimizer, device, epoch, loss_scaler,
            args.clip_grad, args.clip_mode, model_ema, mixup_fn,
            # set_training_mode=args.finetune == '' # keep in eval mode during finetuning
            set_training_mode=True,
            set_bn_eval=args.set_bn_eval,  # set bn to eval if finetune
        )
        lr_scheduler.step(epoch)
        test_stats = evaluate(data_loader_val, model, device)
        print(
            f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
        if args.output_dir:
            # Save every args.save_freq epochs and always on the final epoch.
            # NOTE(review): get_state_dict(model_ema) is called even when
            # model_ema is None (EMA disabled) — confirm timm tolerates that.
            if epoch % args.save_freq == 0 or epoch == args.epochs - 1:
                ckpt_path = os.path.join(output_dir, 'checkpoint_'+str(epoch)+'.pth')
                checkpoint_paths = [ckpt_path]
                print("Saving checkpoint to {}".format(ckpt_path))
                for checkpoint_path in checkpoint_paths:
                    utils.save_on_master({
                        'model': model_without_ddp.state_dict(),
                        'optimizer': optimizer.state_dict(),
                        'lr_scheduler': lr_scheduler.state_dict(),
                        'epoch': epoch,
                        'model_ema': get_state_dict(model_ema),
                        'scaler': loss_scaler.state_dict(),
                        'args': args,
                    }, checkpoint_path)
        max_accuracy = max(max_accuracy, test_stats["acc1"])
        print(f'Max accuracy: {max_accuracy:.2f}%')
        log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
                     **{f'test_{k}': v for k, v in test_stats.items()},
                     'epoch': epoch,
                     'n_parameters': n_parameters}
        if args.output_dir and utils.is_main_process():
            with (output_dir / "log.txt").open("a") as f:
                f.write(json.dumps(log_stats) + "\n")
    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    print('Training time {}'.format(total_time_str))
if __name__ == '__main__':
    # Compose the CLI from the shared argument definitions, ensure the output
    # directory exists, and hand control to main().
    cli = argparse.ArgumentParser(
        'EfficientViT training and evaluation script', parents=[get_args_parser()])
    parsed = cli.parse_args()
    if parsed.output_dir:
        Path(parsed.output_dir).mkdir(parents=True, exist_ok=True)
    main(parsed)
|
Cream/EfficientViT/classification/main.py/0
|
{
"file_path": "Cream/EfficientViT/classification/main.py",
"repo_id": "Cream",
"token_count": 9525
}
| 287 |
# dataset settings
# MMDetection-style config for PASCAL VOC 07+12 detection training.
dataset_type = 'VOCDataset'
data_root = 'data/VOCdevkit/'
# Per-channel mean/std in RGB order (the standard ImageNet statistics),
# consumed by the Normalize steps below.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
# Training pipeline: load image + boxes, resize (aspect-preserving, max
# 1000x600), random horizontal flip, normalize, pad to a /32 multiple, and
# pack tensors for the detector.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(type='Resize', img_scale=(1000, 600), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
# Test pipeline: single scale, flipping disabled.
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1000, 600),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
# Train on the union of VOC2007 and VOC2012 trainval, repeated 3x per epoch;
# validate and test on the VOC2007 test split.
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type='RepeatDataset',
        times=3,
        dataset=dict(
            type=dataset_type,
            ann_file=[
                data_root + 'VOC2007/ImageSets/Main/trainval.txt',
                data_root + 'VOC2012/ImageSets/Main/trainval.txt'
            ],
            img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'],
            pipeline=train_pipeline)),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt',
        img_prefix=data_root + 'VOC2007/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt',
        img_prefix=data_root + 'VOC2007/',
        pipeline=test_pipeline))
# Evaluate VOC-style mAP after every epoch.
evaluation = dict(interval=1, metric='mAP')
|
Cream/EfficientViT/downstream/configs/_base_/datasets/voc0712.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/configs/_base_/datasets/voc0712.py",
"repo_id": "Cream",
"token_count": 943
}
| 288 |
# model settings
# RetinaNet detector with an EfficientViT-M4 backbone and FPN neck.
model = dict(
    type='RetinaNet',
    # NOTE(review): names ResNet-50 torchvision weights while the backbone is
    # EfficientViT_M4 — confirm this top-level `pretrained` is actually used.
    pretrained='torchvision://resnet50',
    backbone=dict(
        type='EfficientViT_M4',
        pretrained="",),
    neck=dict(
        type='FPN',
        # NOTE(review): [256, 512, 1024, 2048] are ResNet-50 stage widths;
        # verify they match the EfficientViT_M4 feature-map channels.
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=1,
        add_extra_convs='on_input',
        num_outs=5),
    bbox_head=dict(
        type='RetinaHead',
        num_classes=80,  # 80 object classes
        in_channels=256,
        stacked_convs=4,
        feat_channels=256,
        anchor_generator=dict(
            type='AnchorGenerator',
            octave_base_scale=4,
            scales_per_octave=3,
            ratios=[0.5, 1.0, 2.0],
            strides=[8, 16, 32, 64, 128]),
        bbox_coder=dict(
            type='DeltaXYWHBBoxCoder',
            target_means=[.0, .0, .0, .0],
            target_stds=[1.0, 1.0, 1.0, 1.0]),
        loss_cls=dict(
            type='FocalLoss',
            use_sigmoid=True,
            gamma=2.0,
            alpha=0.25,
            loss_weight=1.0),
        loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
    # training and testing settings
    train_cfg=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.5,
            neg_iou_thr=0.4,
            min_pos_iou=0,
            ignore_iof_thr=-1),
        allowed_border=-1,
        pos_weight=-1,
        debug=False),
    test_cfg=dict(
        nms_pre=1000,
        min_bbox_size=0,
        score_thr=0.05,
        nms=dict(type='nms', iou_threshold=0.5),
        max_per_img=100))
|
Cream/EfficientViT/downstream/configs/_base_/models/retinanet_efficientvit_fpn.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/configs/_base_/models/retinanet_efficientvit_fpn.py",
"repo_id": "Cream",
"token_count": 916
}
| 289 |
# Copyright (c) Open-MMLab. All rights reserved.
"""Custom runner package: exposes the AMP-aware epoch-based runner and its
checkpoint-saving helper."""
from .checkpoint import save_checkpoint
from .epoch_based_runner import EpochBasedRunnerAmp

# Public API of this subpackage.
__all__ = [
    'EpochBasedRunnerAmp', 'save_checkpoint'
]
|
Cream/EfficientViT/downstream/mmcv_custom/runner/__init__.py/0
|
{
"file_path": "Cream/EfficientViT/downstream/mmcv_custom/runner/__init__.py",
"repo_id": "Cream",
"token_count": 69
}
| 290 |
import argparse
import datetime
import numpy as np
import time
import torch
import torch.backends.cudnn as cudnn
import json
import os
from pathlib import Path
from timm.data import Mixup
try:
from timm.data import DatasetTar
except ImportError:
# for higher version of timm
from timm.data import ImageDataset as DatasetTar
from timm.models import create_model
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
from timm.scheduler import create_scheduler
from timm.optim import create_optimizer
from timm.utils import NativeScaler, get_state_dict, ModelEma
from datasets import build_dataset, build_transform
from engine import train_one_epoch, evaluate
from losses import DistillationLoss
from samplers import RASampler
import utils
import models
import mini_deit_models
def get_args_parser():
    """Build the Mini-DeiT command-line argument parser.

    Returns:
        argparse.ArgumentParser: parser created with ``add_help=False`` so it
        can be attached as a parent parser by the script entry point.
    """
    parser = argparse.ArgumentParser('Mini-DeiT training and evaluation script', add_help=False)
    parser.add_argument('--batch-size', default=64, type=int)
    parser.add_argument('--epochs', default=300, type=int)

    # Model parameters
    parser.add_argument('--model', default='deit_base_patch16_224', type=str, metavar='MODEL',
                        help='Name of model to train')
    parser.add_argument('--pretrained', action='store_true', default=False,
                        help='Start with pretrained version of specified network (if avail)')
    parser.add_argument('--input-size', default=224, type=int, help='images input size')
    parser.add_argument('--drop', type=float, default=0.0, metavar='PCT',
                        help='Dropout rate (default: 0.)')
    parser.add_argument('--drop-path', type=float, default=0.1, metavar='PCT',
                        help='Drop path rate (default: 0.1)')
    # Model EMA is enabled by default; --no-model-ema turns it off.
    parser.add_argument('--model-ema', action='store_true')
    parser.add_argument('--no-model-ema', action='store_false', dest='model_ema')
    parser.set_defaults(model_ema=True)
    parser.add_argument('--model-ema-decay', type=float, default=0.99996, help='')
    parser.add_argument('--model-ema-force-cpu', action='store_true', default=False, help='')

    # Optimizer parameters
    parser.add_argument('--opt', default='adamw', type=str, metavar='OPTIMIZER',
                        help='Optimizer (default: "adamw"')
    parser.add_argument('--opt-eps', default=1e-8, type=float, metavar='EPSILON',
                        help='Optimizer Epsilon (default: 1e-8)')
    parser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',
                        help='Optimizer Betas (default: None, use opt default)')
    parser.add_argument('--clip-grad', type=float, default=None, metavar='NORM',
                        help='Clip gradient norm (default: None, no clipping)')
    parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
                        help='SGD momentum (default: 0.9)')
    parser.add_argument('--weight-decay', type=float, default=0.05,
                        help='weight decay (default: 0.05)')

    # Learning rate schedule parameters
    parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER',
                        help='LR scheduler (default: "cosine"')
    parser.add_argument('--lr', type=float, default=5e-4, metavar='LR',
                        help='learning rate (default: 5e-4)')
    parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',
                        help='learning rate noise on/off epoch percentages')
    parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',
                        help='learning rate noise limit percent (default: 0.67)')
    parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',
                        help='learning rate noise std-dev (default: 1.0)')
    parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR',
                        help='warmup learning rate (default: 1e-6)')
    parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
                        help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')
    parser.add_argument('--decay-epochs', type=float, default=30, metavar='N',
                        help='epoch interval to decay LR')
    parser.add_argument('--warmup-epochs', type=int, default=5, metavar='N',
                        help='epochs to warmup LR, if scheduler supports')
    parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N',
                        help='epochs to cooldown LR at min_lr, after cyclic schedule ends')
    parser.add_argument('--patience-epochs', type=int, default=10, metavar='N',
                        help='patience epochs for Plateau LR scheduler (default: 10')
    parser.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',
                        help='LR decay rate (default: 0.1)')

    # Augmentation parameters
    parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT',
                        help='Color jitter factor (default: 0.4)')
    # Fixed: the original help literal accidentally embedded a stray `" + \`
    # from a broken string concatenation, and the call carried a trailing
    # comma that built a throwaway tuple.
    parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME',
                        help='Use AutoAugment policy. "v0" or "original". '
                             '(default: rand-m9-mstd0.5-inc1)')
    parser.add_argument('--smoothing', type=float, default=0.1, help='Label smoothing (default: 0.1)')
    parser.add_argument('--train-interpolation', type=str, default='bicubic',
                        help='Training interpolation (random, bilinear, bicubic default: "bicubic")')
    # Repeated augmentation is enabled by default; --no-repeated-aug disables it.
    parser.add_argument('--repeated-aug', action='store_true')
    parser.add_argument('--no-repeated-aug', action='store_false', dest='repeated_aug')
    parser.add_argument('--load-tar', action='store_true', help='Loading *.tar files for dataset')
    parser.set_defaults(repeated_aug=True)

    # * Random Erase params
    parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT',
                        help='Random erase prob (default: 0.25)')
    parser.add_argument('--remode', type=str, default='pixel',
                        help='Random erase mode (default: "pixel")')
    parser.add_argument('--recount', type=int, default=1,
                        help='Random erase count (default: 1)')
    parser.add_argument('--resplit', action='store_true', default=False,
                        help='Do not random erase first (clean) augmentation split')

    # * Mixup params
    parser.add_argument('--mixup', type=float, default=0.8,
                        help='mixup alpha, mixup enabled if > 0. (default: 0.8)')
    parser.add_argument('--cutmix', type=float, default=1.0,
                        help='cutmix alpha, cutmix enabled if > 0. (default: 1.0)')
    parser.add_argument('--cutmix-minmax', type=float, nargs='+', default=None,
                        help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)')
    parser.add_argument('--mixup-prob', type=float, default=1.0,
                        help='Probability of performing mixup or cutmix when either/both is enabled')
    parser.add_argument('--mixup-switch-prob', type=float, default=0.5,
                        help='Probability of switching to cutmix when both mixup and cutmix enabled')
    parser.add_argument('--mixup-mode', type=str, default='batch',
                        help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"')

    # Distillation parameters
    parser.add_argument('--teacher-model', default='regnety_160', type=str, metavar='MODEL',
                        help='Name of teacher model to train (default: "regnety_160"')
    parser.add_argument('--teacher-path', type=str, default='')
    parser.add_argument('--distillation-type', default='none', choices=['none', 'soft', 'hard'], type=str, help="")
    parser.add_argument('--distillation-alpha', default=0.5, type=float, help="")
    parser.add_argument('--distillation-tau', default=1.0, type=float, help="")

    # * Finetuning params
    parser.add_argument('--finetune', default='', help='finetune from checkpoint')

    # Dataset parameters
    parser.add_argument('--data-path', default='/datasets01/imagenet_full_size/061417/', type=str,
                        help='dataset path')
    parser.add_argument('--data-set', default='IMNET', choices=['CIFAR100', 'CIFAR10', 'IMNET', 'INAT', 'INAT19'],
                        type=str, help='Image Net dataset path')
    parser.add_argument('--inat-category', default='name',
                        choices=['kingdom', 'phylum', 'class', 'order', 'supercategory', 'family', 'genus', 'name'],
                        type=str, help='semantic granularity')
    parser.add_argument('--output_dir', default='',
                        help='path where to save, empty for no saving')
    parser.add_argument('--device', default='cuda',
                        help='device to use for training / testing')
    parser.add_argument('--seed', default=0, type=int)
    parser.add_argument('--resume', default='', help='resume from checkpoint')
    parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
                        help='start epoch')
    parser.add_argument('--eval', action='store_true', help='Perform evaluation only')
    parser.add_argument('--dist-eval', action='store_true', default=True, help='Enabling distributed evaluation')
    parser.add_argument('--num_workers', default=10, type=int)
    parser.add_argument('--pin-mem', action='store_true',
                        help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
    parser.add_argument('--no-pin-mem', action='store_false', dest='pin_mem',
                        help='')
    parser.set_defaults(pin_mem=True)

    # distributed training parameters
    parser.add_argument('--world_size', default=1, type=int,
                        help='number of distributed processes')
    parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
    return parser
def main(args):
    """Train or evaluate a Mini-DeiT model.

    Builds datasets (from folders or from *.tar archives), distributed-style
    samplers and loaders, creates the model — optionally loading finetune
    weights with position-embedding interpolation, tracking an EMA copy,
    wrapping in DDP, and attaching a distillation teacher — then either runs
    one evaluation pass (``--eval``) or trains for ``args.epochs`` epochs.
    """
    utils.init_distributed_mode(args)
    print(args)
    if args.distillation_type != 'none' and args.finetune and not args.eval:
        raise NotImplementedError("Finetuning with distillation not yet supported")
    device = torch.device(args.device)
    # fix the seed for reproducibility (offset by rank so each process differs)
    seed = args.seed + utils.get_rank()
    torch.manual_seed(seed)
    np.random.seed(seed)
    # random.seed(seed)
    cudnn.benchmark = True
    if args.load_tar:
        # Read ImageNet directly out of train.tar / val.tar archives.
        train_dir = os.path.join(args.data_path, 'train.tar')
        train_transform = build_transform(True, args)
        dataset_train = DatasetTar(train_dir, transform=train_transform)
        args.nb_classes = 1000  # tar path assumes full ImageNet-1k
        val_transform = build_transform(False, args)
        eval_dir = os.path.join(args.data_path, 'val.tar')
        dataset_val = DatasetTar(eval_dir, transform=val_transform)
    else:
        dataset_train, args.nb_classes = build_dataset(is_train=True, args=args)
        dataset_val, _ = build_dataset(is_train=False, args=args)
    if True:  # args.distributed: -- distributed-style samplers are used unconditionally
        num_tasks = utils.get_world_size()
        global_rank = utils.get_rank()
        if args.repeated_aug:
            # Repeated-augmentation sampler.
            sampler_train = RASampler(
                dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
            )
        else:
            sampler_train = torch.utils.data.DistributedSampler(
                dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
            )
        if args.dist_eval:
            if len(dataset_val) % num_tasks != 0:
                print('Warning: Enabling distributed evaluation with an eval dataset not divisible by process number. '
                      'This will slightly alter validation results as extra duplicate entries are added to achieve '
                      'equal num of samples per-process.')
            sampler_val = torch.utils.data.DistributedSampler(
                dataset_val, num_replicas=num_tasks, rank=global_rank, shuffle=False)
        else:
            sampler_val = torch.utils.data.SequentialSampler(dataset_val)
    else:
        # Unreachable while the guard above is `if True:`; kept for the
        # single-process fallback it documents.
        sampler_train = torch.utils.data.RandomSampler(dataset_train)
        sampler_val = torch.utils.data.SequentialSampler(dataset_val)
    data_loader_train = torch.utils.data.DataLoader(
        dataset_train, sampler=sampler_train,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        pin_memory=args.pin_mem,
        drop_last=True,
    )
    data_loader_val = torch.utils.data.DataLoader(
        dataset_val, sampler=sampler_val,
        batch_size=int(1.5 * args.batch_size),  # larger batch for gradient-free eval
        num_workers=args.num_workers,
        pin_memory=args.pin_mem,
        drop_last=False
    )
    mixup_fn = None
    mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None
    if mixup_active:
        mixup_fn = Mixup(
            mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax,
            prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode,
            label_smoothing=args.smoothing, num_classes=args.nb_classes)
    print(f"Creating model: {args.model}")
    model = create_model(
        args.model,
        pretrained=args.pretrained,
        num_classes=args.nb_classes,
        drop_rate=args.drop,
        drop_path_rate=args.drop_path,
        drop_block_rate=None,
    )
    if args.finetune:
        if args.finetune.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.finetune, map_location='cpu', check_hash=True)
        else:
            checkpoint = torch.load(args.finetune, map_location='cpu')
        checkpoint_model = checkpoint['model']
        state_dict = model.state_dict()
        # Drop classifier weights whose shape no longer matches the current
        # model (e.g. a different number of classes).
        for k in ['head.weight', 'head.bias', 'head_dist.weight', 'head_dist.bias']:
            if k in checkpoint_model and checkpoint_model[k].shape != state_dict[k].shape:
                print(f"Removing key {k} from pretrained checkpoint")
                del checkpoint_model[k]
        # interpolate position embedding
        pos_embed_checkpoint = checkpoint_model['pos_embed']
        embedding_size = pos_embed_checkpoint.shape[-1]
        num_patches = model.patch_embed.num_patches
        num_extra_tokens = model.pos_embed.shape[-2] - num_patches
        # height (== width) for the checkpoint position embedding
        orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5)
        # height (== width) for the new position embedding
        new_size = int(num_patches ** 0.5)
        # class_token and dist_token are kept unchanged
        extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens]
        # only the position tokens are interpolated
        pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:]
        pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2)
        pos_tokens = torch.nn.functional.interpolate(
            pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False)
        pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2)
        new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1)
        checkpoint_model['pos_embed'] = new_pos_embed
        model.load_state_dict(checkpoint_model, strict=False)
    model.to(device)
    model_ema = None
    if args.model_ema:
        # Important to create EMA model after cuda(), DP wrapper, and AMP but before SyncBN and DDP wrapper
        model_ema = ModelEma(
            model,
            decay=args.model_ema_decay,
            device='cpu' if args.model_ema_force_cpu else '',
            resume='')
    model_without_ddp = model
    if args.distributed:
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
        # Keep a handle to the unwrapped model for checkpointing/optimizer.
        model_without_ddp = model.module
    n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print('number of params:', n_parameters)
    # Linear LR scaling rule: lr scales with the global batch size over 512.
    linear_scaled_lr = args.lr * args.batch_size * utils.get_world_size() / 512.0
    args.lr = linear_scaled_lr
    optimizer = create_optimizer(args, model_without_ddp)
    loss_scaler = NativeScaler()
    lr_scheduler, _ = create_scheduler(args, optimizer)
    criterion = LabelSmoothingCrossEntropy()
    if args.mixup > 0.:
        # smoothing is handled with mixup label transform
        criterion = SoftTargetCrossEntropy()
    elif args.smoothing:
        criterion = LabelSmoothingCrossEntropy(smoothing=args.smoothing)
    else:
        criterion = torch.nn.CrossEntropyLoss()
    teacher_model = None
    if args.distillation_type != 'none':
        print(f"Creating teacher model: {args.teacher_model}")
        # teacher_pretrained is True when args.teacher_path is empty
        teacher_pretrained = not bool(args.teacher_path)
        teacher_model = create_model(
            args.teacher_model,
            pretrained=teacher_pretrained,
            num_classes=args.nb_classes,
            global_pool='avg',
        )
        if not teacher_pretrained:
            # Load explicit teacher weights from a URL or local file.
            if args.teacher_path.startswith('https'):
                checkpoint = torch.hub.load_state_dict_from_url(
                    args.teacher_path, map_location='cpu', check_hash=True)
            else:
                checkpoint = torch.load(args.teacher_path, map_location='cpu')
            teacher_model.load_state_dict(checkpoint['model'])
        teacher_model.to(device)
        teacher_model.eval()
    # wrap the criterion in our custom DistillationLoss, which
    # just dispatches to the original criterion if args.distillation_type is 'none'
    criterion = DistillationLoss(
        criterion, teacher_model, args.distillation_type, args.distillation_alpha, args.distillation_tau
    )
    output_dir = Path(args.output_dir)
    if args.resume:
        if args.resume.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.resume, map_location='cpu', check_hash=True)
        else:
            checkpoint = torch.load(args.resume, map_location='cpu')
        model_without_ddp.load_state_dict(checkpoint['model'])
        # Restore optimizer/scheduler/EMA/scaler state only when continuing training.
        if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint:
            optimizer.load_state_dict(checkpoint['optimizer'])
            lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
            args.start_epoch = checkpoint['epoch'] + 1
            if args.model_ema:
                utils._load_checkpoint_for_ema(model_ema, checkpoint['model_ema'])
            if 'scaler' in checkpoint:
                loss_scaler.load_state_dict(checkpoint['scaler'])
    if args.eval:
        test_stats = evaluate(data_loader_val, model, device)
        print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
        return
    print(f"Start training for {args.epochs} epochs")
    start_time = time.time()
    max_accuracy = 0.0
    for epoch in range(args.start_epoch, args.epochs):
        if args.distributed:
            # Re-seed the sampler so each epoch sees a different shard order.
            data_loader_train.sampler.set_epoch(epoch)
        train_stats = train_one_epoch(
            model, criterion, data_loader_train,
            optimizer, device, epoch, loss_scaler,
            args.clip_grad, model_ema, mixup_fn,
            set_training_mode=args.finetune == ''  # keep in eval mode during finetuning
        )
        lr_scheduler.step(epoch)
        if args.output_dir:
            # Overwrite a single rolling checkpoint each epoch (rank 0 only).
            # NOTE(review): get_state_dict(model_ema) is called even when
            # model_ema is None (EMA disabled) — confirm timm tolerates that.
            checkpoint_paths = [output_dir / 'checkpoint.pth']
            for checkpoint_path in checkpoint_paths:
                utils.save_on_master({
                    'model': model_without_ddp.state_dict(),
                    'optimizer': optimizer.state_dict(),
                    'lr_scheduler': lr_scheduler.state_dict(),
                    'epoch': epoch,
                    'model_ema': get_state_dict(model_ema),
                    'scaler': loss_scaler.state_dict(),
                    'args': args,
                }, checkpoint_path)
        test_stats = evaluate(data_loader_val, model, device)
        print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
        max_accuracy = max(max_accuracy, test_stats["acc1"])
        print(f'Max accuracy: {max_accuracy:.2f}%')
        log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
                     **{f'test_{k}': v for k, v in test_stats.items()},
                     'epoch': epoch,
                     'n_parameters': n_parameters}
        if args.output_dir and utils.is_main_process():
            with (output_dir / "log.txt").open("a") as f:
                f.write(json.dumps(log_stats) + "\n")
    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    print('Training time {}'.format(total_time_str))
if __name__ == '__main__':
    # Build the CLI from the shared parser definitions and launch training.
    cli = argparse.ArgumentParser('DeiT training and evaluation script', parents=[get_args_parser()])
    parsed = cli.parse_args()
    if parsed.output_dir:
        Path(parsed.output_dir).mkdir(parents=True, exist_ok=True)
    main(parsed)
|
Cream/MiniViT/Mini-DeiT/main.py/0
|
{
"file_path": "Cream/MiniViT/Mini-DeiT/main.py",
"repo_id": "Cream",
"token_count": 9257
}
| 291 |
# Swin-Base (patch size 4, window size 7, 224x224 input) model configuration.
MODEL:
  TYPE: swin
  NAME: swin_base_patch4_window7_224
  DROP_PATH_RATE: 0.5  # drop-path (stochastic depth) rate
  SWIN:
    EMBED_DIM: 128               # base channel width
    DEPTHS: [ 2, 2, 18, 2 ]      # transformer blocks per stage
    NUM_HEADS: [ 4, 8, 16, 32 ]  # attention heads per stage
    WINDOW_SIZE: 7
|
Cream/MiniViT/Mini-Swin/configs/swin_base_patch4_window7_224.yaml/0
|
{
"file_path": "Cream/MiniViT/Mini-Swin/configs/swin_base_patch4_window7_224.yaml",
"repo_id": "Cream",
"token_count": 102
}
| 292 |
import os
import time
import datetime
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.distributed as dist
import warnings
warnings.filterwarnings(action="ignore", category=UserWarning)
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
from timm.utils import accuracy
from timm.models import create_model
from my_meter import AverageMeter
from models import build_model
from data import build_loader
from lr_scheduler import build_scheduler
from optimizer import build_optimizer
from logger import create_logger
from utils import load_checkpoint, save_checkpoint, get_grad_norm, auto_resume_helper, reduce_tensor, parse_option
from models.swin_transformer_distill import SwinTransformerDISTILL
try:
# noinspection PyUnresolvedReferences
from apex import amp
except ImportError:
amp = None
def soft_cross_entropy(predicts, targets):
    """Soft-label cross entropy between two sets of logits.

    ``targets`` is converted to a probability distribution with a softmax
    over the last dimension and used as the soft label for ``predicts``;
    returns the loss averaged over all leading dimensions.
    """
    log_probs = torch.nn.functional.log_softmax(predicts, dim=-1)
    soft_labels = torch.nn.functional.softmax(targets, dim=-1)
    per_sample = -(soft_labels * log_probs).sum(dim=-1)
    return per_sample.mean()
def cal_relation_loss(student_attn_list, teacher_attn_list, Ar):
    """Attention-relation distillation loss.

    Each layer contributes a triplet of (B, N, C) projections (presumably
    q/k/v — three tensors are indexed per layer).  Every ordered pair (i, j)
    of the triplet is turned into a scaled (N x N) affinity map over ``Ar``
    relation heads, and the student's map is pulled towards the teacher's
    with a soft cross-entropy.  The result is averaged over the 9 pairs and
    over all layers.
    """
    def _affinity(triplet, i, j, batch, tokens, channels):
        # Split channels into Ar heads, then form a scaled token-affinity map.
        head_dim = channels // Ar
        lhs = triplet[i].view(batch, tokens, Ar, head_dim).transpose(1, 2) / (channels / Ar) ** 0.5
        rhs = triplet[j].view(batch, tokens, Ar, head_dim).permute(0, 2, 3, 1)
        return lhs @ rhs

    total = 0.
    for stu_triplet, tea_triplet in zip(student_attn_list, teacher_attn_list):
        batch, tokens, stu_dim = stu_triplet[0].shape
        tea_dim = tea_triplet[0].shape[-1]
        for i in range(3):
            for j in range(3):
                total += soft_cross_entropy(
                    _affinity(stu_triplet, i, j, batch, tokens, stu_dim),
                    _affinity(tea_triplet, i, j, batch, tokens, tea_dim))
    return total / (9. * len(student_attn_list))
def cal_hidden_loss(student_hidden_list, teacher_hidden_list):
    """Mean MSE between corresponding student/teacher hidden states, averaged over layers."""
    mse = torch.nn.MSELoss()
    total = sum(mse(s_hidden, t_hidden)
                for s_hidden, t_hidden in zip(student_hidden_list, teacher_hidden_list))
    return total / len(student_hidden_list)
def cal_hidden_relation_loss(student_hidden_list, teacher_hidden_list):
    """Hidden-state relation distillation loss.

    For each layer, builds a token-to-token cosine-similarity matrix from the
    L2-normalized hidden states (B, N, C) and matches student vs teacher with
    MSE, scaled by 49 (window size 7 squared). Averaged over layers.
    """
    def _relation(hidden):
        unit = torch.nn.functional.normalize(hidden, dim=-1)
        return unit @ unit.transpose(-1, -2)

    total = 0.
    for s_hidden, t_hidden in zip(student_hidden_list, teacher_hidden_list):
        diff = _relation(s_hidden) - _relation(t_hidden)
        total = total + torch.mean(diff ** 2) * 49  # 49 = 7x7 window area
    return total / len(student_hidden_list)
def load_teacher_model(type='large'):
    """Construct a Swin teacher network for distillation.

    `type` selects the architecture width ('large' or 'base'); anything else
    raises ValueError. The teacher is built with is_student=False so the
    distillation hooks on the student side are disabled.
    """
    presets = {
        'large': dict(embed_dim=192, num_heads=[6, 12, 24, 48]),
        'base': dict(embed_dim=128, num_heads=[4, 8, 16, 32]),
    }
    if type not in presets:
        raise ValueError('Unsupported type: %s' % type)
    preset = presets[type]
    return SwinTransformerDISTILL(
        img_size=224,
        patch_size=4,
        in_chans=3,
        num_classes=1000,
        embed_dim=preset['embed_dim'],
        depths=[2, 2, 18, 2],
        num_heads=preset['num_heads'],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        qk_scale=None,
        drop_rate=0.0,
        drop_path_rate=0.1,
        ape=False,
        patch_norm=True,
        use_checkpoint=False,
        # distillation: this model is the teacher, not the student
        is_student=False)
def main(config):
    """End-to-end training driver.

    Builds data loaders, the student model, the optimizer/scheduler, and
    (when DISTILL.DO_DISTILL) a frozen teacher; handles auto-resume,
    eval-only / throughput-only modes, then runs the train/validate loop.
    Relies on the module-level `logger` created in the __main__ block.
    """
    dataset_train, dataset_val, data_loader_train, data_loader_val, mixup_fn = build_loader(config)
    if config.DISTILL.DO_DISTILL:
        logger.info(f"Loading teacher model:{config.MODEL.TYPE}/{config.DISTILL.TEACHER}")
        model_checkpoint_name = os.path.basename(config.DISTILL.TEACHER)
        if 'regnety_160' in model_checkpoint_name:
            # RegNetY-160 teacher from timm; checkpoint may be a URL or a local file
            model_teacher = create_model(
                'regnety_160',
                pretrained=False,
                num_classes=config.MODEL.NUM_CLASSES,
                global_pool='avg',
            )
            if config.DISTILL.TEACHER.startswith('https'):
                checkpoint = torch.hub.load_state_dict_from_url(
                    config.DISTILL.TEACHER, map_location='cpu', check_hash=True)
            else:
                checkpoint = torch.load(config.DISTILL.TEACHER, map_location='cpu')
            model_teacher.load_state_dict(checkpoint['model'])
            model_teacher.cuda()
            model_teacher.eval()
            del checkpoint
            torch.cuda.empty_cache()
        else:
            # Swin teacher; size is inferred from the checkpoint file name
            if 'base' in model_checkpoint_name:
                teacher_type = 'base'
            elif 'large' in model_checkpoint_name:
                teacher_type = 'large'
            else:
                teacher_type = None
            model_teacher = load_teacher_model(type=teacher_type)
            model_teacher.cuda()
            model_teacher = torch.nn.parallel.DistributedDataParallel(model_teacher, device_ids=[config.LOCAL_RANK], broadcast_buffers=False)
            checkpoint = torch.load(config.DISTILL.TEACHER, map_location='cpu')
            msg = model_teacher.module.load_state_dict(checkpoint['model'], strict=False)
            logger.info(msg)
            del checkpoint
            torch.cuda.empty_cache()
    logger.info(f"Creating model:{config.MODEL.TYPE}/{config.MODEL.NAME}")
    model = build_model(config)
    model.cuda()
    logger.info(str(model))
    optimizer = build_optimizer(config, model)
    if config.AMP_OPT_LEVEL != "O0":
        # apex AMP must wrap model+optimizer before DDP
        model, optimizer = amp.initialize(model, optimizer, opt_level=config.AMP_OPT_LEVEL)
    model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[config.LOCAL_RANK], broadcast_buffers=False, find_unused_parameters=True)
    model_without_ddp = model.module
    n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
    logger.info(f"number of params: {n_parameters}")
    if hasattr(model_without_ddp, 'flops'):
        flops = model_without_ddp.flops()
        logger.info(f"number of GFLOPs: {flops / 1e9}")
    lr_scheduler = build_scheduler(config, optimizer, len(data_loader_train))
    # distillation criteria: soft logits, attention relations, hidden states
    criterion_soft = soft_cross_entropy
    criterion_attn = cal_relation_loss
    criterion_hidden = cal_hidden_relation_loss if config.DISTILL.HIDDEN_RELATION else cal_hidden_loss
    if config.AUG.MIXUP > 0.:
        # smoothing is handled with mixup label transform
        criterion_truth = SoftTargetCrossEntropy()
    elif config.MODEL.LABEL_SMOOTHING > 0.:
        criterion_truth = LabelSmoothingCrossEntropy(smoothing=config.MODEL.LABEL_SMOOTHING)
    else:
        criterion_truth = torch.nn.CrossEntropyLoss()
    max_accuracy = 0.0
    if config.TRAIN.AUTO_RESUME:
        resume_file = auto_resume_helper(config.OUTPUT)
        if resume_file:
            if config.MODEL.RESUME:
                logger.warning(f"auto-resume changing resume file from {config.MODEL.RESUME} to {resume_file}")
            config.defrost()
            config.MODEL.RESUME = resume_file
            config.DISTILL.RESUME_WEIGHT_ONLY = False
            config.freeze()
            logger.info(f'auto resuming from {resume_file}')
        else:
            logger.info(f'no checkpoint found in {config.OUTPUT}, ignoring auto resume')
    if config.MODEL.RESUME:
        max_accuracy = load_checkpoint(config, model_without_ddp, optimizer, lr_scheduler, logger)
        acc1, acc5, loss = validate(config, data_loader_val, model, logger)
        logger.info(f"Accuracy of the network on the {len(dataset_val)} test images: {acc1:.1f}%")
        if config.EVAL_MODE:
            return
    if config.THROUGHPUT_MODE:
        throughput(data_loader_val, model, logger)
        return
    logger.info("Start training")
    start_time = time.time()
    for epoch in range(config.TRAIN.START_EPOCH, config.TRAIN.EPOCHS):
        data_loader_train.sampler.set_epoch(epoch)
        if config.DISTILL.DO_DISTILL:
            train_one_epoch_distill(config, model, model_teacher, data_loader_train, optimizer, epoch, mixup_fn, lr_scheduler, criterion_soft=criterion_soft, criterion_truth=criterion_truth, criterion_attn=criterion_attn, criterion_hidden=criterion_hidden)
        else:
            train_one_epoch(config, model, criterion_truth, data_loader_train, optimizer, epoch, mixup_fn, lr_scheduler)
        # only rank 0 writes checkpoints
        if dist.get_rank() == 0 and (epoch % config.SAVE_FREQ == 0 or epoch == (config.TRAIN.EPOCHS - 1)):
            save_checkpoint(config, epoch, model_without_ddp, max_accuracy, optimizer, lr_scheduler, logger)
        if epoch % config.EVAL_FREQ == 0 or epoch == config.TRAIN.EPOCHS - 1:
            acc1, acc5, loss = validate(config, data_loader_val, model, logger)
            logger.info(f"Accuracy of the network on the {len(dataset_val)} test images: {acc1:.1f}%")
            max_accuracy = max(max_accuracy, acc1)
            logger.info(f'Max accuracy: {max_accuracy:.2f}%')
    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    logger.info('Training time {}'.format(total_time_str))
def train_one_epoch_distill(config, model, model_teacher, data_loader, optimizer, epoch, mixup_fn, lr_scheduler, criterion_soft=None, criterion_truth=None, criterion_attn=None, criterion_hidden=None):
    """Train the student `model` for one epoch against a frozen `model_teacher`.

    Total loss = ALPHA * truth loss + (1-ALPHA) * soft-logit loss at
    TEMPERATURE, plus optional attention-relation and hidden-state terms.
    Supports apex AMP and gradient accumulation depending on config.
    Relies on the module-level `logger`.
    """
    # layer indices whose attention/hidden states are aligned student<->teacher
    layer_id_s_list = config.DISTILL.STUDENT_LAYER_LIST
    layer_id_t_list = config.DISTILL.TEACHER_LAYER_LIST
    model.train()
    optimizer.zero_grad()
    model_teacher.eval()
    num_steps = len(data_loader)
    batch_time = AverageMeter()
    loss_meter = AverageMeter()
    norm_meter = AverageMeter()
    loss_soft_meter = AverageMeter()
    loss_truth_meter = AverageMeter()
    loss_attn_meter = AverageMeter()
    loss_hidden_meter = AverageMeter()
    acc1_meter = AverageMeter()
    acc5_meter = AverageMeter()
    teacher_acc1_meter = AverageMeter()
    teacher_acc5_meter = AverageMeter()
    start = time.time()
    end = time.time()
    for idx, (samples, targets) in enumerate(data_loader):
        samples = samples.cuda(non_blocking=True)
        targets = targets.cuda(non_blocking=True)
        # keep hard labels for accuracy computation before mixup replaces them
        original_targets = targets
        if mixup_fn is not None:
            samples, targets = mixup_fn(samples, targets)
        # forward the student, requesting intermediate states as configured
        if config.DISTILL.ATTN_LOSS and config.DISTILL.HIDDEN_LOSS:
            outputs, qkv_s, hidden_s = model(samples, layer_id_s_list, is_attn_loss=True, is_hidden_loss=True, is_hidden_org=config.DISTILL.HIDDEN_RELATION)
        elif config.DISTILL.ATTN_LOSS:
            outputs, qkv_s = model(samples, layer_id_s_list, is_attn_loss=True, is_hidden_loss=False, is_hidden_org=config.DISTILL.HIDDEN_RELATION)
        elif config.DISTILL.HIDDEN_LOSS:
            outputs, hidden_s = model(samples, layer_id_s_list, is_attn_loss=False, is_hidden_loss=True, is_hidden_org=config.DISTILL.HIDDEN_RELATION)
        else:
            outputs = model(samples)
        with torch.no_grad():
            acc1, acc5 = accuracy(outputs, original_targets, topk=(1, 5))
            # teacher forward never tracks gradients
            if config.DISTILL.ATTN_LOSS or config.DISTILL.HIDDEN_LOSS:
                outputs_teacher, qkv_t, hidden_t = model_teacher(samples, layer_id_t_list, is_attn_loss=True, is_hidden_loss=True)
            else:
                outputs_teacher = model_teacher(samples)
            teacher_acc1, teacher_acc5 = accuracy(outputs_teacher, original_targets, topk=(1, 5))
        if config.TRAIN.ACCUMULATION_STEPS > 1:
            # gradient-accumulation path: scale loss, step every N iterations
            loss_truth = config.DISTILL.ALPHA*criterion_truth(outputs, targets)
            loss_soft = (1.0 - config.DISTILL.ALPHA)*criterion_soft(outputs/config.DISTILL.TEMPERATURE, outputs_teacher/config.DISTILL.TEMPERATURE)
            if config.DISTILL.ATTN_LOSS:
                loss_attn= config.DISTILL.QKV_LOSS_WEIGHT * criterion_attn(qkv_s, qkv_t, config.DISTILL.AR)
            else:
                # zero placeholder so the meters/logging below still work
                loss_attn = torch.zeros(loss_truth.shape)
            if config.DISTILL.HIDDEN_LOSS:
                loss_hidden = config.DISTILL.HIDDEN_LOSS_WEIGHT*criterion_hidden(hidden_s, hidden_t)
            else:
                loss_hidden = torch.zeros(loss_truth.shape)
            loss = loss_truth + loss_soft + loss_attn + loss_hidden
            loss = loss / config.TRAIN.ACCUMULATION_STEPS
            if config.AMP_OPT_LEVEL != "O0":
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(amp.master_params(optimizer))
            else:
                loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(model.parameters())
            if (idx + 1) % config.TRAIN.ACCUMULATION_STEPS == 0:
                optimizer.step()
                optimizer.zero_grad()
                lr_scheduler.step_update(epoch * num_steps + idx)
        else:
            # standard path: optimizer step on every iteration
            loss_truth = config.DISTILL.ALPHA*criterion_truth(outputs, targets)
            loss_soft = (1.0 - config.DISTILL.ALPHA)*criterion_soft(outputs/config.DISTILL.TEMPERATURE, outputs_teacher/config.DISTILL.TEMPERATURE)
            if config.DISTILL.ATTN_LOSS:
                loss_attn= config.DISTILL.QKV_LOSS_WEIGHT * criterion_attn(qkv_s, qkv_t, config.DISTILL.AR)
            else:
                loss_attn = torch.zeros(loss_truth.shape)
            if config.DISTILL.HIDDEN_LOSS:
                loss_hidden = config.DISTILL.HIDDEN_LOSS_WEIGHT*criterion_hidden(hidden_s, hidden_t)
            else:
                loss_hidden = torch.zeros(loss_truth.shape)
            loss = loss_truth + loss_soft + loss_attn + loss_hidden
            optimizer.zero_grad()
            if config.AMP_OPT_LEVEL != "O0":
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(amp.master_params(optimizer))
            else:
                loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(model.parameters())
            optimizer.step()
            lr_scheduler.step_update(epoch * num_steps + idx)
        torch.cuda.synchronize()
        loss_meter.update(loss.item(), targets.size(0))
        loss_soft_meter.update(loss_soft.item(), targets.size(0))
        loss_truth_meter.update(loss_truth.item(), targets.size(0))
        loss_attn_meter.update(loss_attn.item(), targets.size(0))
        loss_hidden_meter.update(loss_hidden.item(), targets.size(0))
        norm_meter.update(grad_norm)
        batch_time.update(time.time() - end)
        end = time.time()
        acc1_meter.update(acc1.item(), targets.size(0))
        acc5_meter.update(acc5.item(), targets.size(0))
        teacher_acc1_meter.update(teacher_acc1.item(), targets.size(0))
        teacher_acc5_meter.update(teacher_acc5.item(), targets.size(0))
        if idx % config.PRINT_FREQ == 0:
            lr = optimizer.param_groups[0]['lr']
            memory_used = torch.cuda.max_memory_allocated() / (1024.0 * 1024.0)
            etas = batch_time.avg * (num_steps - idx)
            logger.info(
                f'Train: [{epoch}/{config.TRAIN.EPOCHS}][{idx}/{num_steps}]\t'
                f'eta {datetime.timedelta(seconds=int(etas))} lr {lr:.6f}\t'
                f'time {batch_time.val:.4f} ({batch_time.avg:.4f})\t'
                f'Acc@1 {acc1_meter.avg:.3f} Acc@5 {acc5_meter.avg:.3f}\t'
                f'Teacher_Acc@1 {teacher_acc1_meter.avg:.3f} Teacher_Acc@5 {teacher_acc5_meter.avg:.3f}\t'
                f'loss {loss_meter.val:.4f} ({loss_meter.avg:.4f})\t'
                f'loss_soft {loss_soft_meter.val:.4f} ({loss_soft_meter.avg:.4f})\t'
                f'loss_truth {loss_truth_meter.val:.4f} ({loss_truth_meter.avg:.4f})\t'
                f'loss_attn {loss_attn_meter.val:.4f} ({loss_attn_meter.avg:.4f})\t'
                f'loss_hidden {loss_hidden_meter.val:.4f} ({loss_hidden_meter.avg:.4f})\t'
                f'grad_norm {norm_meter.val:.4f} ({norm_meter.avg:.4f})\t'
                f'mem {memory_used:.0f}MB')
    epoch_time = time.time() - start
    logger.info(f"EPOCH {epoch} training takes {datetime.timedelta(seconds=int(epoch_time))}")
def train_one_epoch(config, model, criterion, data_loader, optimizer, epoch, mixup_fn, lr_scheduler):
    """Train `model` for one epoch without distillation.

    Supports apex AMP and gradient accumulation depending on config.
    Relies on the module-level `logger`.
    """
    model.train()
    optimizer.zero_grad()
    num_steps = len(data_loader)
    batch_time = AverageMeter()
    loss_meter = AverageMeter()
    norm_meter = AverageMeter()
    acc1_meter = AverageMeter()
    acc5_meter = AverageMeter()
    start = time.time()
    end = time.time()
    for idx, (samples, targets) in enumerate(data_loader):
        samples = samples.cuda(non_blocking=True)
        targets = targets.cuda(non_blocking=True)
        # keep hard labels for accuracy computation before mixup replaces them
        original_targets = targets
        if mixup_fn is not None:
            samples, targets = mixup_fn(samples, targets)
        outputs = model(samples)
        with torch.no_grad():
            acc1, acc5 = accuracy(outputs, original_targets, topk=(1, 5))
        if config.TRAIN.ACCUMULATION_STEPS > 1:
            # gradient-accumulation path: scale loss, step every N iterations
            loss = criterion(outputs, targets)
            loss = loss / config.TRAIN.ACCUMULATION_STEPS
            if config.AMP_OPT_LEVEL != "O0":
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(amp.master_params(optimizer))
            else:
                loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(model.parameters())
            if (idx + 1) % config.TRAIN.ACCUMULATION_STEPS == 0:
                optimizer.step()
                optimizer.zero_grad()
                lr_scheduler.step_update(epoch * num_steps + idx)
        else:
            # standard path: optimizer step on every iteration
            loss = criterion(outputs, targets)
            optimizer.zero_grad()
            if config.AMP_OPT_LEVEL != "O0":
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(amp.master_params(optimizer))
            else:
                loss.backward()
                if config.TRAIN.CLIP_GRAD:
                    grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), config.TRAIN.CLIP_GRAD)
                else:
                    grad_norm = get_grad_norm(model.parameters())
            optimizer.step()
            lr_scheduler.step_update(epoch * num_steps + idx)
        torch.cuda.synchronize()
        loss_meter.update(loss.item(), targets.size(0))
        norm_meter.update(grad_norm)
        batch_time.update(time.time() - end)
        end = time.time()
        acc1_meter.update(acc1.item(), targets.size(0))
        acc5_meter.update(acc5.item(), targets.size(0))
        if idx % config.PRINT_FREQ == 0:
            lr = optimizer.param_groups[0]['lr']
            memory_used = torch.cuda.max_memory_allocated() / (1024.0 * 1024.0)
            etas = batch_time.avg * (num_steps - idx)
            logger.info(
                f'Train: [{epoch}/{config.TRAIN.EPOCHS}][{idx}/{num_steps}]\t'
                f'eta {datetime.timedelta(seconds=int(etas))} lr {lr:.6f}\t'
                f'time {batch_time.val:.4f} ({batch_time.avg:.4f})\t'
                f'Acc@1 {acc1_meter.avg:.3f} Acc@5 {acc5_meter.avg:.3f}\t'
                f'loss {loss_meter.val:.4f} ({loss_meter.avg:.4f})\t'
                f'grad_norm {norm_meter.val:.4f} ({norm_meter.avg:.4f})\t'
                f'mem {memory_used:.0f}MB')
    epoch_time = time.time() - start
    logger.info(f"EPOCH {epoch} training takes {datetime.timedelta(seconds=int(epoch_time))}")
@torch.no_grad()
def validate(config, data_loader, model, logger):
    """Evaluate `model` on `data_loader`.

    Returns (top-1 acc, top-5 acc, loss), each averaged over the dataset and
    synchronized across distributed ranks via the meters' sync().
    """
    criterion = torch.nn.CrossEntropyLoss()
    model.eval()
    batch_time = AverageMeter()
    loss_meter = AverageMeter()
    acc1_meter = AverageMeter()
    acc5_meter = AverageMeter()
    end = time.time()
    for idx, (images, target) in enumerate(data_loader):
        images = images.cuda(non_blocking=True)
        target = target.cuda(non_blocking=True)
        output = model(images)
        # measure accuracy and record loss
        loss = criterion(output, target)
        acc1, acc5 = accuracy(output, target, topk=(1, 5))
        loss_meter.update(loss.item(), target.size(0))
        acc1_meter.update(acc1.item(), target.size(0))
        acc5_meter.update(acc5.item(), target.size(0))
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if idx % config.PRINT_FREQ == 0:
            memory_used = torch.cuda.max_memory_allocated() / (1024.0 * 1024.0)
            logger.info(
                f'Test: [{idx}/{len(data_loader)}]\t'
                f'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                f'Loss {loss_meter.val:.4f} ({loss_meter.avg:.4f})\t'
                f'Acc@1 {acc1_meter.val:.3f} ({acc1_meter.avg:.3f})\t'
                f'Acc@5 {acc5_meter.val:.3f} ({acc5_meter.avg:.3f})\t'
                f'Mem {memory_used:.0f}MB')
    # reduce meters across processes before reporting final numbers
    loss_meter.sync()
    acc1_meter.sync()
    acc5_meter.sync()
    logger.info(f' * Acc@1 {acc1_meter.avg:.3f} Acc@5 {acc5_meter.avg:.3f}')
    return acc1_meter.avg, acc5_meter.avg, loss_meter.avg
@torch.no_grad()
def throughput(data_loader, model, logger):
    """Measure inference throughput (images/sec) on the first batch of `data_loader`."""
    model.eval()
    for images, _ in data_loader:
        images = images.cuda(non_blocking=True)
        batch_size = images.shape[0]
        # warm-up passes so kernels/caches settle before timing
        for _ in range(50):
            model(images)
        torch.cuda.synchronize()
        logger.info(f"throughput averaged with 30 times")
        tic1 = time.time()
        for _ in range(30):
            model(images)
        torch.cuda.synchronize()
        tic2 = time.time()
        logger.info(f"batch_size {batch_size} throughput {30 * batch_size / (tic2 - tic1)}")
        # only the first batch is measured
        return
if __name__ == '__main__':
    # Parse CLI/config, set up distributed training, scale LRs, then run main().
    _, config = parse_option()
    if config.AMP_OPT_LEVEL != "O0":
        assert amp is not None, "amp not installed!"
    # rank/world size come from the torch.distributed launcher environment
    if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
        rank = int(os.environ["RANK"])
        world_size = int(os.environ['WORLD_SIZE'])
        print(f"RANK and WORLD_SIZE in environ: {rank}/{world_size}")
    else:
        rank = -1
        world_size = -1
    torch.cuda.set_device(config.LOCAL_RANK)
    torch.distributed.init_process_group(backend='nccl', init_method='env://', world_size=world_size, rank=rank)
    torch.distributed.barrier()
    # per-rank seed so data augmentation differs across processes
    seed = config.SEED + dist.get_rank()
    torch.manual_seed(seed)
    np.random.seed(seed)
    cudnn.benchmark = True
    # linear scale the learning rate according to total batch size, may not be optimal
    linear_scaled_lr = config.TRAIN.BASE_LR * config.DATA.BATCH_SIZE * dist.get_world_size() / 512.0
    linear_scaled_warmup_lr = config.TRAIN.WARMUP_LR * config.DATA.BATCH_SIZE * dist.get_world_size() / 512.0
    linear_scaled_min_lr = config.TRAIN.MIN_LR * config.DATA.BATCH_SIZE * dist.get_world_size() / 512.0
    # gradient accumulation also need to scale the learning rate
    if config.TRAIN.ACCUMULATION_STEPS > 1:
        linear_scaled_lr = linear_scaled_lr * config.TRAIN.ACCUMULATION_STEPS
        linear_scaled_warmup_lr = linear_scaled_warmup_lr * config.TRAIN.ACCUMULATION_STEPS
        linear_scaled_min_lr = linear_scaled_min_lr * config.TRAIN.ACCUMULATION_STEPS
    config.defrost()
    config.TRAIN.BASE_LR = linear_scaled_lr
    config.TRAIN.WARMUP_LR = linear_scaled_warmup_lr
    config.TRAIN.MIN_LR = linear_scaled_min_lr
    config.freeze()
    os.makedirs(config.OUTPUT, exist_ok=True)
    # module-level logger used by main() and the train/validate helpers
    logger = create_logger(output_dir=config.OUTPUT, dist_rank=dist.get_rank(), name=f"{config.MODEL.NAME}")
    if dist.get_rank() == 0:
        path = os.path.join(config.OUTPUT, "config.json")
        with open(path, "w") as f:
            f.write(config.dump())
        logger.info(f"Full config saved to {path}")
    # print config
    logger.info(config.dump())
    main(config)
|
Cream/MiniViT/Mini-Swin/main.py/0
|
{
"file_path": "Cream/MiniViT/Mini-Swin/main.py",
"repo_id": "Cream",
"token_count": 12739
}
| 293 |
{
"embed_dim": 512,
"vision_cfg": {
"image_size": 224,
"layers": 12,
"width": 512,
"patch_size": 16
},
"text_cfg": {
"context_length": 77,
"vocab_size": 49408,
"width": 512,
"heads": 8,
"layers": 6
}
}
|
Cream/TinyCLIP/src/open_clip/model_configs/TinyCLIP-ViT-39M-16-Text-19M.json/0
|
{
"file_path": "Cream/TinyCLIP/src/open_clip/model_configs/TinyCLIP-ViT-39M-16-Text-19M.json",
"repo_id": "Cream",
"token_count": 172
}
| 294 |
""" Mixup and Cutmix
Papers:
mixup: Beyond Empirical Risk Minimization (https://arxiv.org/abs/1710.09412)
CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features (https://arxiv.org/abs/1905.04899)
Code Reference:
CutMix: https://github.com/clovaai/CutMix-PyTorch
Hacked together by / Copyright 2020 Ross Wightman
"""
import numpy as np
import torch
from .aug_random import AugRandomContext, random, np_random
def one_hot(x, num_classes, on_value=1., off_value=0., device='cuda'):
x = x.long().view(-1, 1)
return torch.full((x.size()[0], num_classes), off_value, device=device).scatter_(1, x, on_value)
def mixup_target(target, num_classes, lam=1., smoothing=0.0, device='cuda'):
    """Build smoothed one-hot targets mixed with the batch-flipped targets by `lam`."""
    off_value = smoothing / num_classes
    on_value = 1. - smoothing + off_value

    def encode(labels):
        return one_hot(labels, num_classes, on_value=on_value, off_value=off_value, device=device)

    return encode(target) * lam + encode(target.flip(0)) * (1. - lam)
def rand_bbox(img_shape, lam, margin=0., count=None):
    """Sample a standard CutMix bounding box.

    The box area follows the lambda value (side length scales with
    sqrt(1 - lam)); `margin` keeps the box center away from the image border
    by that fraction of the box dimensions. Returns (yl, yh, xl, xh), each an
    array when `count` is given.
    """
    side_ratio = np.sqrt(1 - lam)
    img_h, img_w = img_shape[-2:]
    cut_h = int(img_h * side_ratio)
    cut_w = int(img_w * side_ratio)
    pad_y = int(margin * cut_h)
    pad_x = int(margin * cut_w)
    # draw the box center, then clip the corners to the image
    cy = np_random.randint(pad_y, img_h - pad_y, size=count)
    cx = np_random.randint(pad_x, img_w - pad_x, size=count)
    yl = np.clip(cy - cut_h // 2, 0, img_h)
    yh = np.clip(cy + cut_h // 2, 0, img_h)
    xl = np.clip(cx - cut_w // 2, 0, img_w)
    xh = np.clip(cx + cut_w // 2, 0, img_w)
    return yl, yh, xl, xh
def rand_bbox_minmax(img_shape, minmax, count=None):
    """Sample a CutMix box whose height/width are uniform in [minmax[0], minmax[1]]
    fractions of the image dimensions (Darknet-style min-max variant)."""
    assert len(minmax) == 2
    img_h, img_w = img_shape[-2:]
    lo_frac, hi_frac = minmax
    cut_h = np_random.randint(int(img_h * lo_frac), int(img_h * hi_frac), size=count)
    cut_w = np_random.randint(int(img_w * lo_frac), int(img_w * hi_frac), size=count)
    # top-left corner is chosen so the box always fits inside the image
    yl = np_random.randint(0, img_h - cut_h, size=count)
    xl = np_random.randint(0, img_w - cut_w, size=count)
    return yl, yl + cut_h, xl, xl + cut_w
def cutmix_bbox_and_lam(img_shape, lam, ratio_minmax=None, correct_lam=True, count=None):
    """Pick a CutMix box (min-max variant when `ratio_minmax` is set) and,
    when requested, recompute lambda from the actual (possibly clipped) box area."""
    if ratio_minmax is None:
        (yl, yu, xl, xu) = rand_bbox(img_shape, lam, count=count)
    else:
        (yl, yu, xl, xu) = rand_bbox_minmax(img_shape, ratio_minmax, count=count)
    if correct_lam or ratio_minmax is not None:
        bbox_area = (yu - yl) * (xu - xl)
        lam = 1. - bbox_area / float(img_shape[-2] * img_shape[-1])
    return (yl, yu, xl, xu), lam
class Mixup:
    """ Mixup/Cutmix that applies different params to each element or whole batch
    Args:
        mixup_alpha (float): mixup alpha value, mixup is active if > 0.
        cutmix_alpha (float): cutmix alpha value, cutmix is active if > 0.
        cutmix_minmax (List[float]): cutmix min/max image ratio, cutmix is active and uses this vs alpha if not None.
        prob (float): probability of applying mixup or cutmix per batch or element
        switch_prob (float): probability of switching to cutmix instead of mixup when both are active
        mode (str): how to apply mixup/cutmix params (per 'batch', 'pair' (pair of elements), 'elem' (element)
        correct_lam (bool): apply lambda correction when cutmix bbox clipped by image borders
        label_smoothing (float): apply label smoothing to the mixed target tensor
        num_classes (int): number of classes for target
    """
    def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0, switch_prob=0.5,
                 mode='batch', correct_lam=True, label_smoothing=0.1, num_classes=1000):
        self.mixup_alpha = mixup_alpha
        self.cutmix_alpha = cutmix_alpha
        self.cutmix_minmax = cutmix_minmax
        if self.cutmix_minmax is not None:
            assert len(self.cutmix_minmax) == 2
            # force cutmix alpha == 1.0 when minmax active to keep logic simple & safe
            self.cutmix_alpha = 1.0
        self.mix_prob = prob
        self.switch_prob = switch_prob
        self.label_smoothing = label_smoothing
        self.num_classes = num_classes
        self.mode = mode
        assert self.mode in ['batch', 'pair', 'elem', 'pair2'], 'Invalid mode: {}'.format(self.mode)
        assert self.mode in ['pair2'], 'The mode of mixup should be `pair2` when saving logits'
        self.correct_lam = correct_lam  # correct lambda based on clipped area for cutmix
        self.mixup_enabled = True  # set to false to disable mixing (intended to be set by train loop)

    def _params_per_elem(self, batch_size):
        """Sample per-element lambda values and cutmix on/off flags.

        FIX: uses the builtin `bool` dtype; the deprecated `np.bool` alias was
        removed in NumPy 1.24 and raised AttributeError here.
        """
        lam = np.ones(batch_size, dtype=np.float32)
        use_cutmix = np.zeros(batch_size, dtype=bool)
        if self.mixup_enabled:
            if self.mixup_alpha > 0. and self.cutmix_alpha > 0.:
                use_cutmix = np_random.rand(batch_size) < self.switch_prob
                lam_mix = np.where(
                    use_cutmix,
                    np_random.beta(self.cutmix_alpha, self.cutmix_alpha, size=batch_size),
                    np_random.beta(self.mixup_alpha, self.mixup_alpha, size=batch_size))
            elif self.mixup_alpha > 0.:
                lam_mix = np_random.beta(self.mixup_alpha, self.mixup_alpha, size=batch_size)
            elif self.cutmix_alpha > 0.:
                use_cutmix = np.ones(batch_size, dtype=bool)
                lam_mix = np_random.beta(self.cutmix_alpha, self.cutmix_alpha, size=batch_size)
            else:
                assert False, "One of mixup_alpha > 0., cutmix_alpha > 0., cutmix_minmax not None should be true."
            # per-element application probability
            lam = np.where(np_random.rand(batch_size) < self.mix_prob, lam_mix.astype(np.float32), lam)
        return lam, use_cutmix

    def _params_per_batch(self):
        """Sample a single (lam, use_cutmix) pair for a whole batch (or pair)."""
        lam = 1.
        use_cutmix = False
        if self.mixup_enabled and np_random.rand() < self.mix_prob:
            if self.mixup_alpha > 0. and self.cutmix_alpha > 0.:
                use_cutmix = np_random.rand() < self.switch_prob
                lam_mix = np_random.beta(self.cutmix_alpha, self.cutmix_alpha) if use_cutmix else \
                    np_random.beta(self.mixup_alpha, self.mixup_alpha)
            elif self.mixup_alpha > 0.:
                lam_mix = np_random.beta(self.mixup_alpha, self.mixup_alpha)
            elif self.cutmix_alpha > 0.:
                use_cutmix = True
                lam_mix = np_random.beta(self.cutmix_alpha, self.cutmix_alpha)
            else:
                assert False, "One of mixup_alpha > 0., cutmix_alpha > 0., cutmix_minmax not None should be true."
            lam = float(lam_mix)
        return lam, use_cutmix

    def _mix_elem(self, x):
        """Mix each element with its mirror-index partner, per-element params."""
        batch_size = len(x)
        lam_batch, use_cutmix = self._params_per_elem(batch_size)
        x_orig = x.clone()  # need to keep an unmodified original for mixing source
        for i in range(batch_size):
            j = batch_size - i - 1
            lam = lam_batch[i]
            if lam != 1.:
                if use_cutmix[i]:
                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                        x[i].shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
                    x[i][:, yl:yh, xl:xh] = x_orig[j][:, yl:yh, xl:xh]
                    lam_batch[i] = lam
                else:
                    x[i] = x[i] * lam + x_orig[j] * (1 - lam)
        return torch.tensor(lam_batch, device=x.device, dtype=x.dtype).unsqueeze(1)

    def _mix_pair(self, x):
        """Mix symmetric pairs (i, batch-1-i) in place with shared params."""
        batch_size = len(x)
        lam_batch, use_cutmix = self._params_per_elem(batch_size // 2)
        x_orig = x.clone()  # need to keep an unmodified original for mixing source
        for i in range(batch_size // 2):
            j = batch_size - i - 1
            lam = lam_batch[i]
            if lam != 1.:
                if use_cutmix[i]:
                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                        x[i].shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
                    x[i][:, yl:yh, xl:xh] = x_orig[j][:, yl:yh, xl:xh]
                    x[j][:, yl:yh, xl:xh] = x_orig[i][:, yl:yh, xl:xh]
                    lam_batch[i] = lam
                else:
                    x[i] = x[i] * lam + x_orig[j] * (1 - lam)
                    x[j] = x[j] * lam + x_orig[i] * (1 - lam)
        lam_batch = np.concatenate((lam_batch, lam_batch[::-1]))
        return torch.tensor(lam_batch, device=x.device, dtype=x.dtype).unsqueeze(1)

    def _mix_batch(self, x):
        """Mix the whole batch with its flipped copy using one (lam, cutmix) draw."""
        lam, use_cutmix = self._params_per_batch()
        if lam == 1.:
            return 1.
        if use_cutmix:
            (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                x.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
            x[:, :, yl:yh, xl:xh] = x.flip(0)[:, :, yl:yh, xl:xh]
        else:
            x_flipped = x.flip(0).mul_(1. - lam)
            x.mul_(lam).add_(x_flipped)
        return lam

    def _mix_pair2(self, x, seeds):
        """Mix adjacent pairs (2i, 2i+1), reseeding the RNG per pair so the
        augmentation is reproducible from the per-sample seeds (logit saving)."""
        assert seeds is not None, "seeds must be provided when mode is `pair2` in mixup"
        batch_size = len(x)
        lam_batch = np.ones(batch_size, dtype=np.float32)
        for i in range(0, batch_size, 2):
            # for each pair x[i] and x[i + 1]
            seed = int(seeds[i] ^ seeds[i + 1])
            with AugRandomContext(seed=seed):
                lam, use_cutmix = self._params_per_batch()
            lam_batch[i:i+2] = lam
            if lam == 1.:
                continue
            if use_cutmix:
                # cutmix
                (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                    x[i].shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
                x[i:i+2, :, yl:yh, xl:xh] = x[i:i+2].flip(0)[:, :, yl:yh, xl:xh]
            else:
                # mixup
                x_flipped = x[i:i+2].flip(0).mul_(1. - lam)
                x[i:i+2].mul_(lam).add_(x_flipped)
        return torch.tensor(lam_batch, device=x.device, dtype=x.dtype).unsqueeze(1)

    def __call__(self, x, target, seeds=None):
        """Mix a batch in place and return (mixed x, soft targets)."""
        assert len(x) % 2 == 0, 'Batch size should be even when using this'
        if self.mode == 'elem':
            lam = self._mix_elem(x)
        elif self.mode == 'pair':
            lam = self._mix_pair(x)
        elif self.mode == 'pair2':
            lam = self._mix_pair2(x, seeds)
        else:
            lam = self._mix_batch(x)
        if target is not None:
            target = mixup_target(target, self.num_classes, lam, self.label_smoothing, x.device)
        return x, target
class FastCollateMixup(Mixup):
    """ Fast Collate w/ Mixup/Cutmix that applies different params to each element or whole batch
    A Mixup impl that's performed while collating the batches.
    """
    def _mix_elem_collate(self, output, batch, half=False):
        # Per-element mixing while copying uint8 samples into `output`.
        # `batch` is a sequence of (ndarray image, label); partner is the
        # mirror index j = batch_size - 1 - i. With half=True only the first
        # half of the batch is produced (partner stays unmixed).
        batch_size = len(batch)
        num_elem = batch_size // 2 if half else batch_size
        assert len(output) == num_elem
        lam_batch, use_cutmix = self._params_per_elem(num_elem)
        for i in range(num_elem):
            j = batch_size - i - 1
            lam = lam_batch[i]
            mixed = batch[i][0]
            if lam != 1.:
                if use_cutmix[i]:
                    if not half:
                        mixed = mixed.copy()
                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                        output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
                    mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh]
                    lam_batch[i] = lam
                else:
                    # blend in float32, round back to integral pixel values
                    mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam)
                    np.rint(mixed, out=mixed)
            output[i] += torch.from_numpy(mixed.astype(np.uint8))
        if half:
            # unmixed second half keeps lam == 1 for target construction
            lam_batch = np.concatenate((lam_batch, np.ones(num_elem)))
        return torch.tensor(lam_batch).unsqueeze(1)

    def _mix_pair_collate(self, output, batch):
        # Symmetric pair mixing: (i, j) swap cutmix patches or cross-blend,
        # so both members of the pair are mixed with the same lambda.
        batch_size = len(batch)
        lam_batch, use_cutmix = self._params_per_elem(batch_size // 2)
        for i in range(batch_size // 2):
            j = batch_size - i - 1
            lam = lam_batch[i]
            mixed_i = batch[i][0]
            mixed_j = batch[j][0]
            assert 0 <= lam <= 1.0
            if lam < 1.:
                if use_cutmix[i]:
                    (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                        output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
                    # swap the box contents between the pair
                    patch_i = mixed_i[:, yl:yh, xl:xh].copy()
                    mixed_i[:, yl:yh, xl:xh] = mixed_j[:, yl:yh, xl:xh]
                    mixed_j[:, yl:yh, xl:xh] = patch_i
                    lam_batch[i] = lam
                else:
                    mixed_temp = mixed_i.astype(np.float32) * lam + mixed_j.astype(np.float32) * (1 - lam)
                    mixed_j = mixed_j.astype(np.float32) * lam + mixed_i.astype(np.float32) * (1 - lam)
                    mixed_i = mixed_temp
                    np.rint(mixed_j, out=mixed_j)
                    np.rint(mixed_i, out=mixed_i)
            output[i] += torch.from_numpy(mixed_i.astype(np.uint8))
            output[j] += torch.from_numpy(mixed_j.astype(np.uint8))
        lam_batch = np.concatenate((lam_batch, lam_batch[::-1]))
        return torch.tensor(lam_batch).unsqueeze(1)

    def _mix_batch_collate(self, output, batch):
        # One (lam, cutmix) draw shared by the whole batch; each element mixes
        # with its mirror-index partner.
        batch_size = len(batch)
        lam, use_cutmix = self._params_per_batch()
        if use_cutmix:
            (yl, yh, xl, xh), lam = cutmix_bbox_and_lam(
                output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam)
        for i in range(batch_size):
            j = batch_size - i - 1
            mixed = batch[i][0]
            if lam != 1.:
                if use_cutmix:
                    mixed = mixed.copy()  # don't want to modify the original while iterating
                    mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh]
                else:
                    mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam)
                    np.rint(mixed, out=mixed)
            output[i] += torch.from_numpy(mixed.astype(np.uint8))
        return lam

    def __call__(self, batch, _=None):
        """Collate a list of (ndarray, label) into mixed uint8 tensor + soft targets."""
        batch_size = len(batch)
        assert batch_size % 2 == 0, 'Batch size should be even when using this'
        half = 'half' in self.mode
        if half:
            batch_size //= 2
        output = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)
        if self.mode == 'elem' or self.mode == 'half':
            lam = self._mix_elem_collate(output, batch, half=half)
        elif self.mode == 'pair':
            lam = self._mix_pair_collate(output, batch)
        else:
            lam = self._mix_batch_collate(output, batch)
        target = torch.tensor([b[1] for b in batch], dtype=torch.int64)
        target = mixup_target(target, self.num_classes, lam, self.label_smoothing, device='cpu')
        # with half mode only the first half of the targets is kept
        target = target[:batch_size]
        return output, target
|
Cream/TinyViT/data/augmentation/mixup.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/mixup.py",
"repo_id": "Cream",
"token_count": 8081
}
| 295 |
# --------------------------------------------------------
# TinyViT ImageNet 22k Dataset
# Copyright (c) 2022 Microsoft
# --------------------------------------------------------
import io
import os
import torch
from collections import defaultdict
from PIL import Image
import zipfile
class IN22KDataset(torch.utils.data.Dataset):
    """ImageNet-22k dataset reading images out of one zip archive per class.

    Expects `data_root` to contain:
      * `in22k_image_names.txt` -- one image name per line (e.g.
        'n00007846_10001'); the prefix before the first '_' is the WordNet
        synset id and doubles as the class name.
      * `<class_name>.zip` -- one zip archive per class holding the images.

    Args:
        data_root: directory with the filelist and per-class zip archives.
        transform: callable applied to each decoded RGB PIL image, or None.
        fname_format: format string mapping an image name to its member path
            inside the class archive, e.g. '{}.jpeg'.
        debug: if True, keep only images of class 'n00007846'; class ids are
            still assigned over the full sorted class list.
    """

    def __init__(self, data_root, transform, fname_format='{}.jpeg', debug=False):
        super().__init__()
        self.data_root = data_root
        self.transform = transform
        self.debug = debug
        self.fname_format = fname_format
        info_fname = os.path.join(data_root, 'in22k_image_names.txt')
        assert os.path.isfile(
            info_fname), f'IN22k-List filelist: {info_fname} does not exist'
        # Group image names by class (the part of the name before '_').
        folders = defaultdict(list)
        with open(info_fname, 'r') as f:
            for iname in f:
                iname = iname.strip()
                if not iname:
                    continue  # tolerate blank lines in the filelist
                class_name = iname[:iname.index('_')]
                folders[class_name].append(iname)
        class_names = sorted(folders.keys())
        # nb_classes counts all classes, even those emptied by debug below,
        # so class ids stay stable between debug and full runs.
        self.nb_classes = len(class_names)
        if debug:
            for name in class_names:
                if not name.startswith('n00007846'):
                    folders[name] = []
        # Flat list of (image_name, class_id), ordered by sorted class name.
        self.data = []
        for cls_id, cls_name in enumerate(class_names):
            self.data.extend([(iname, cls_id) for iname in folders[cls_name]])

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        """Return (transformed RGB image, class id) for sample `idx`."""
        iname, target = self.data[idx]
        iob = self._read_file(iname)
        img = Image.open(iob).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)
        return img, target

    def _read_file(self, iname):
        """Read image `iname` from its class zip and return it as a BytesIO.

        Example: iname 'n00007846_10001' with fname_format '{}.jpeg' reads
        member 'n00007846_10001.jpeg' from 'n00007846.zip'.
        """
        cls_name = iname[:iname.index('_')]
        fname = self.fname_format.format(iname)
        zip_fname = os.path.join(self.data_root, cls_name + '.zip')
        # BUG FIX: the archive handle was previously never closed, leaking one
        # open file descriptor per __getitem__ call; close it promptly.
        with zipfile.ZipFile(zip_fname, 'r') as handle:
            bstr = handle.read(fname)
        return io.BytesIO(bstr)

    def get_keys(self):
        """Return all image names, in dataset order."""
        return [e[0] for e in self.data]
# Smoke test: build a debug dataset and print the first sample when run directly.
if __name__ == '__main__':
    data_root = './ImageNet-22k'
    # identity transform (no preprocessing)
    def transform(x): return x
    fname_format = 'imagenet22k/{}.JPEG'
    dataset = IN22KDataset(data_root, transform, fname_format, debug=True)
    for img, target in dataset:
        print(type(img), target)
        break
|
Cream/TinyViT/data/imagenet22k_dataset.py/0
|
{
"file_path": "Cream/TinyViT/data/imagenet22k_dataset.py",
"repo_id": "Cream",
"token_count": 1142
}
| 296 |
# --------------------------------------------------------
# TinyViT Model Architecture
# Copyright (c) 2022 Microsoft
# Adapted from LeViT and Swin Transformer
# LeViT: (https://github.com/facebookresearch/levit)
# Swin: (https://github.com/microsoft/swin-transformer)
# Build the TinyViT Model
# --------------------------------------------------------
import itertools
from typing import Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as checkpoint
import timm
from timm.models.layers import DropPath as TimmDropPath,\
to_2tuple, trunc_normal_
from timm.models.registry import register_model
try:
# timm.__version__ >= "0.6"
from timm.models._builder import build_model_with_cfg
except (ImportError, ModuleNotFoundError):
# timm.__version__ < "0.6"
from timm.models.helpers import build_model_with_cfg
class Conv2d_BN(torch.nn.Sequential):
    """Bias-free Conv2d followed by BatchNorm2d, with a `fuse` helper.

    Args:
        a: input channels.
        b: output channels.
        ks / stride / pad / dilation / groups: Conv2d geometry parameters.
        bn_weight_init: initial BN scale (gamma); 0 makes the layer start as
            a zero mapping (used on residual branches).
    """
    def __init__(self, a, b, ks=1, stride=1, pad=0, dilation=1,
                 groups=1, bn_weight_init=1):
        super().__init__()
        self.add_module('c', torch.nn.Conv2d(
            a, b, ks, stride, pad, dilation, groups, bias=False))
        bn = torch.nn.BatchNorm2d(b)
        torch.nn.init.constant_(bn.weight, bn_weight_init)
        torch.nn.init.constant_(bn.bias, 0)
        self.add_module('bn', bn)
    @torch.no_grad()
    def fuse(self):
        """Fold the BN statistics into the conv and return one equivalent
        Conv2d (inference-time optimization)."""
        c, bn = self._modules.values()
        # per-output-channel scale: gamma / sqrt(var + eps)
        w = bn.weight / (bn.running_var + bn.eps)**0.5
        w = c.weight * w[:, None, None, None]
        # folded bias: beta - mean * gamma / sqrt(var + eps)
        b = bn.bias - bn.running_mean * bn.weight / \
            (bn.running_var + bn.eps)**0.5
        # in_channels of a grouped conv = weight.size(1) * groups
        m = torch.nn.Conv2d(w.size(1) * self.c.groups, w.size(
            0), w.shape[2:], stride=self.c.stride, padding=self.c.padding, dilation=self.c.dilation, groups=self.c.groups)
        m.weight.data.copy_(w)
        m.bias.data.copy_(b)
        return m
class DropPath(TimmDropPath):
    """timm's DropPath whose repr additionally reports the drop probability."""
    def __init__(self, drop_prob=None):
        super().__init__(drop_prob=drop_prob)
        self.drop_prob = drop_prob
    def __repr__(self):
        return super().__repr__() + f'(drop_prob={self.drop_prob})'
class PatchEmbed(nn.Module):
    """Embed an image via two stride-2 Conv+BN stages (4x spatial downsample)."""
    def __init__(self, in_chans, embed_dim, resolution, activation):
        super().__init__()
        img_size: Tuple[int, int] = to_2tuple(resolution)
        self.patches_resolution = (img_size[0] // 4, img_size[1] // 4)
        self.num_patches = self.patches_resolution[0] * self.patches_resolution[1]
        self.in_chans = in_chans
        self.embed_dim = embed_dim
        self.seq = nn.Sequential(
            Conv2d_BN(in_chans, embed_dim // 2, 3, 2, 1),
            activation(),
            Conv2d_BN(embed_dim // 2, embed_dim, 3, 2, 1),
        )
    def forward(self, x):
        return self.seq(x)
class MBConv(nn.Module):
    """Inverted-residual block: 1x1 expand -> 3x3 depthwise -> 1x1 project,
    with a residual connection, stochastic depth, and a final activation."""
    def __init__(self, in_chans, out_chans, expand_ratio,
                 activation, drop_path):
        super().__init__()
        self.in_chans = in_chans
        self.hidden_chans = int(in_chans * expand_ratio)
        self.out_chans = out_chans
        self.conv1 = Conv2d_BN(in_chans, self.hidden_chans, ks=1)
        self.act1 = activation()
        self.conv2 = Conv2d_BN(self.hidden_chans, self.hidden_chans,
                               ks=3, stride=1, pad=1, groups=self.hidden_chans)
        self.act2 = activation()
        # zero-init the last BN scale so the residual branch starts at zero
        self.conv3 = Conv2d_BN(
            self.hidden_chans, out_chans, ks=1, bn_weight_init=0.0)
        self.act3 = activation()
        self.drop_path = DropPath(
            drop_path) if drop_path > 0. else nn.Identity()
    def forward(self, x):
        shortcut = x
        out = self.act1(self.conv1(x))
        out = self.act2(self.conv2(out))
        out = self.conv3(out)
        out = self.drop_path(out) + shortcut
        return self.act3(out)
class PatchMerging(nn.Module):
    """Downsample tokens 2x: 1x1 conv -> stride-2 depthwise conv -> 1x1 conv.

    Accepts either a (B, L, C) token sequence (reshaped via
    `input_resolution`) or a (B, C, H, W) feature map; always returns tokens.
    """
    def __init__(self, input_resolution, dim, out_dim, activation):
        super().__init__()
        self.input_resolution = input_resolution
        self.dim = dim
        self.out_dim = out_dim
        self.act = activation()
        self.conv1 = Conv2d_BN(dim, out_dim, 1, 1, 0)
        self.conv2 = Conv2d_BN(out_dim, out_dim, 3, 2, 1, groups=out_dim)
        self.conv3 = Conv2d_BN(out_dim, out_dim, 1, 1, 0)
    def forward(self, x):
        if x.ndim == 3:
            # (B, L, C) tokens -> (B, C, H, W) feature map
            height, width = self.input_resolution
            x = x.view(len(x), height, width, -1).permute(0, 3, 1, 2)
        x = self.act(self.conv1(x))
        x = self.act(self.conv2(x))
        x = self.conv3(x)
        # back to a token sequence: (B, L', out_dim)
        return x.flatten(2).transpose(1, 2)
class ConvLayer(nn.Module):
    """Stage of `depth` MBConv blocks, optionally followed by a downsample module."""
    def __init__(self, dim, input_resolution, depth,
                 activation,
                 drop_path=0., downsample=None, use_checkpoint=False,
                 out_dim=None,
                 conv_expand_ratio=4.,
                 ):
        super().__init__()
        self.dim = dim
        self.input_resolution = input_resolution
        self.depth = depth
        self.use_checkpoint = use_checkpoint
        # per-block stochastic-depth rates (scalar broadcast if not a list)
        rates = drop_path if isinstance(drop_path, list) else [drop_path] * depth
        self.blocks = nn.ModuleList(
            MBConv(dim, dim, conv_expand_ratio, activation, rates[i])
            for i in range(depth))
        self.downsample = None if downsample is None else downsample(
            input_resolution, dim=dim, out_dim=out_dim, activation=activation)
    def forward(self, x):
        for blk in self.blocks:
            # gradient checkpointing trades compute for activation memory
            x = checkpoint.checkpoint(blk, x) if self.use_checkpoint else blk(x)
        if self.downsample is not None:
            x = self.downsample(x)
        return x
class Mlp(nn.Module):
    """Pre-norm MLP: LayerNorm -> Linear -> act -> dropout -> Linear -> dropout."""
    def __init__(self, in_features, hidden_features=None,
                 out_features=None, act_layer=nn.GELU, drop=0.):
        super().__init__()
        # submodule creation order matches the original (norm, fc1, fc2, act, drop)
        self.norm = nn.LayerNorm(in_features)
        self.fc1 = nn.Linear(in_features, hidden_features or in_features)
        self.fc2 = nn.Linear(hidden_features or in_features,
                             out_features or in_features)
        self.act = act_layer()
        self.drop = nn.Dropout(drop)
    def forward(self, x):
        hidden = self.norm(x)
        hidden = self.drop(self.act(self.fc1(hidden)))
        return self.drop(self.fc2(hidden))
class Attention(torch.nn.Module):
    """Multi-head self-attention with learned relative-position biases.

    One learnable bias per head per unique absolute (dy, dx) offset between
    positions of the `resolution` grid; the `attention_bias_idxs` buffer maps
    every (query, key) position pair to its table entry.

    Args:
        dim: input/output embedding dimension.
        key_dim: per-head dimension of queries and keys.
        num_heads: number of attention heads.
        attn_ratio: per-head value dimension = attn_ratio * key_dim.
        resolution: (h, w) token grid the bias table is built for.
    """
    def __init__(self, dim, key_dim, num_heads=8,
                 attn_ratio=4,
                 resolution=(14, 14),
                 ):
        super().__init__()
        # (h, w)
        assert isinstance(resolution, tuple) and len(resolution) == 2
        self.num_heads = num_heads
        self.scale = key_dim ** -0.5
        self.key_dim = key_dim
        self.nh_kd = nh_kd = key_dim * num_heads
        self.d = int(attn_ratio * key_dim)
        self.dh = int(attn_ratio * key_dim) * num_heads
        self.attn_ratio = attn_ratio
        # qkv projection width: values (dh) + queries and keys (nh_kd each)
        h = self.dh + nh_kd * 2
        self.norm = nn.LayerNorm(dim)
        self.qkv = nn.Linear(dim, h)
        self.proj = nn.Linear(self.dh, dim)
        # enumerate all grid positions and deduplicate absolute offsets
        points = list(itertools.product(
            range(resolution[0]), range(resolution[1])))
        N = len(points)
        attention_offsets = {}
        idxs = []
        for p1 in points:
            for p2 in points:
                offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1]))
                if offset not in attention_offsets:
                    attention_offsets[offset] = len(attention_offsets)
                idxs.append(attention_offsets[offset])
        self.attention_biases = torch.nn.Parameter(
            torch.zeros(num_heads, len(attention_offsets)))
        # non-persistent: rebuilt per resolution, excluded from checkpoints
        self.register_buffer('attention_bias_idxs',
                             torch.LongTensor(idxs).view(N, N),
                             persistent=False)
    @torch.no_grad()
    def train(self, mode=True):
        # In eval mode, cache the expanded bias matrix as `self.ab` to avoid
        # re-gathering it per forward; drop the cache when training resumes.
        super().train(mode)
        if mode and hasattr(self, 'ab'):
            del self.ab
        else:
            self.ab = self.attention_biases[:, self.attention_bias_idxs]
    def forward(self, x):  # x (B,N,C)
        B, N, _ = x.shape
        # Normalization
        x = self.norm(x)
        qkv = self.qkv(x)
        # (B, N, num_heads, d)
        q, k, v = qkv.view(B, N, self.num_heads, -
                           1).split([self.key_dim, self.key_dim, self.d], dim=3)
        # (B, num_heads, N, d)
        q = q.permute(0, 2, 1, 3)
        k = k.permute(0, 2, 1, 3)
        v = v.permute(0, 2, 1, 3)
        # scaled dot-product scores plus relative-position bias
        attn = (
            (q @ k.transpose(-2, -1)) * self.scale
            +
            (self.attention_biases[:, self.attention_bias_idxs]
             if self.training else self.ab)
        )
        attn = attn.softmax(dim=-1)
        x = (attn @ v).transpose(1, 2).reshape(B, N, self.dh)
        x = self.proj(x)
        return x
class TinyViTBlock(nn.Module):
    r""" TinyViT Block.
    Windowed attention -> residual -> depthwise local conv -> MLP -> residual.
    Args:
        dim (int): Number of input channels.
        input_resolution (tuple[int, int]): Input resulotion.
        num_heads (int): Number of attention heads.
        window_size (int): Window size.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
        drop (float, optional): Dropout rate. Default: 0.0
        drop_path (float, optional): Stochastic depth rate. Default: 0.0
        local_conv_size (int): the kernel size of the convolution between
            Attention and MLP. Default: 3
        activation: the activation function. Default: nn.GELU
    """
    def __init__(self, dim, input_resolution, num_heads, window_size=7,
                 mlp_ratio=4., drop=0., drop_path=0.,
                 local_conv_size=3,
                 activation=nn.GELU,
                 ):
        super().__init__()
        self.dim = dim
        self.input_resolution = input_resolution
        self.num_heads = num_heads
        assert window_size > 0, 'window_size must be greater than 0'
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.drop_path = DropPath(
            drop_path) if drop_path > 0. else nn.Identity()
        assert dim % num_heads == 0, 'dim must be divisible by num_heads'
        head_dim = dim // num_heads
        # attention operates within window_size x window_size patches
        window_resolution = (window_size, window_size)
        self.attn = Attention(dim, head_dim, num_heads,
                              attn_ratio=1, resolution=window_resolution)
        mlp_hidden_dim = int(dim * mlp_ratio)
        mlp_activation = activation
        self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
                       act_layer=mlp_activation, drop=drop)
        pad = local_conv_size // 2
        # depthwise conv mixing neighboring tokens between attention and MLP
        self.local_conv = Conv2d_BN(
            dim, dim, ks=local_conv_size, stride=1, pad=pad, groups=dim)
    def forward(self, x):
        H, W = self.input_resolution
        B, L, C = x.shape
        assert L == H * W, "input feature has wrong size"
        res_x = x
        if H == self.window_size and W == self.window_size:
            # single window covers the whole feature map: attend directly
            x = self.attn(x)
        else:
            x = x.view(B, H, W, C)
            # pad bottom/right so H and W become multiples of window_size
            pad_b = (self.window_size - H %
                     self.window_size) % self.window_size
            pad_r = (self.window_size - W %
                     self.window_size) % self.window_size
            padding = pad_b > 0 or pad_r > 0
            if padding:
                x = F.pad(x, (0, 0, 0, pad_r, 0, pad_b))
            pH, pW = H + pad_b, W + pad_r
            nH = pH // self.window_size
            nW = pW // self.window_size
            # window partition
            x = x.view(B, nH, self.window_size, nW, self.window_size, C).transpose(2, 3).reshape(
                B * nH * nW, self.window_size * self.window_size, C
            )
            x = self.attn(x)
            # window reverse
            x = x.view(B, nH, nW, self.window_size, self.window_size,
                       C).transpose(2, 3).reshape(B, pH, pW, C)
            if padding:
                # drop the padded rows/cols again
                x = x[:, :H, :W].contiguous()
            x = x.view(B, L, C)
        x = res_x + self.drop_path(x)
        # local depthwise conv operates on the (B, C, H, W) layout
        x = x.transpose(1, 2).reshape(B, C, H, W)
        x = self.local_conv(x)
        x = x.view(B, C, L).transpose(1, 2)
        x = x + self.drop_path(self.mlp(x))
        return x
    def extra_repr(self) -> str:
        return f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, " \
               f"window_size={self.window_size}, mlp_ratio={self.mlp_ratio}"
class BasicLayer(nn.Module):
    """One transformer stage: `depth` TinyViTBlocks plus an optional downsample.

    Args:
        dim (int): number of input channels.
        input_resolution (tuple[int]): input resolution of this stage.
        depth (int): number of TinyViT blocks.
        num_heads (int): number of attention heads.
        window_size (int): local attention window size.
        mlp_ratio (float): ratio of MLP hidden dim to embedding dim.
        drop (float): dropout rate. Default: 0.0
        drop_path (float | tuple[float]): stochastic depth rate(s). Default: 0.0
        downsample (nn.Module | None): downsample module applied after the
            blocks. Default: None
        use_checkpoint (bool): use gradient checkpointing. Default: False
        local_conv_size (int): kernel size of the depthwise conv between
            attention and MLP. Default: 3
        activation: activation function. Default: nn.GELU
        out_dim: output dimension passed to the downsample module. Default: dim
    """
    def __init__(self, dim, input_resolution, depth, num_heads, window_size,
                 mlp_ratio=4., drop=0.,
                 drop_path=0., downsample=None, use_checkpoint=False,
                 local_conv_size=3,
                 activation=nn.GELU,
                 out_dim=None,
                 ):
        super().__init__()
        self.dim = dim
        self.input_resolution = input_resolution
        self.depth = depth
        self.use_checkpoint = use_checkpoint
        # per-block stochastic-depth rates (scalar broadcast if not a list)
        rates = drop_path if isinstance(drop_path, list) else [drop_path] * depth
        self.blocks = nn.ModuleList(
            TinyViTBlock(dim=dim, input_resolution=input_resolution,
                         num_heads=num_heads, window_size=window_size,
                         mlp_ratio=mlp_ratio,
                         drop=drop,
                         drop_path=rates[i],
                         local_conv_size=local_conv_size,
                         activation=activation,
                         )
            for i in range(depth))
        self.downsample = None if downsample is None else downsample(
            input_resolution, dim=dim, out_dim=out_dim, activation=activation)
    def forward(self, x):
        for blk in self.blocks:
            x = checkpoint.checkpoint(blk, x) if self.use_checkpoint else blk(x)
        if self.downsample is not None:
            x = self.downsample(x)
        return x
    def extra_repr(self) -> str:
        return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}"
class TinyViT(nn.Module):
    """TinyViT backbone: conv stem + one MBConv stage + transformer stages.

    Args:
        img_size: input resolution.
        in_chans: input image channels.
        num_classes: classifier output classes (0 disables the head).
        embed_dims / depths / num_heads / window_sizes: per-stage settings.
        mlp_ratio: MLP hidden-dim ratio in transformer blocks.
        drop_rate: dropout rate inside MLPs.
        drop_path_rate: maximum stochastic-depth rate (linearly scheduled).
        use_checkpoint: use gradient checkpointing in all stages.
        mbconv_expand_ratio: expansion ratio of the stage-0 MBConv blocks.
        local_conv_size: depthwise conv kernel in transformer blocks.
        layer_lr_decay: per-block layer-wise lr decay factor (1.0 = off).
    """
    def __init__(self, img_size=224, in_chans=3, num_classes=1000,
                 embed_dims=[96, 192, 384, 768], depths=[2, 2, 6, 2],
                 num_heads=[3, 6, 12, 24],
                 window_sizes=[7, 7, 14, 7],
                 mlp_ratio=4.,
                 drop_rate=0.,
                 drop_path_rate=0.1,
                 use_checkpoint=False,
                 mbconv_expand_ratio=4.0,
                 local_conv_size=3,
                 layer_lr_decay=1.0,
                 ):
        super().__init__()
        self.num_classes = num_classes
        self.depths = depths
        self.num_layers = len(depths)
        self.mlp_ratio = mlp_ratio
        activation = nn.GELU
        self.patch_embed = PatchEmbed(in_chans=in_chans,
                                      embed_dim=embed_dims[0],
                                      resolution=img_size,
                                      activation=activation)
        patches_resolution = self.patch_embed.patches_resolution
        self.patches_resolution = patches_resolution
        # stochastic depth
        dpr = [x.item() for x in torch.linspace(0, drop_path_rate,
                                                sum(depths))]  # stochastic depth decay rule
        # build layers
        self.layers = nn.ModuleList()
        for i_layer in range(self.num_layers):
            # each stage halves the spatial resolution via PatchMerging
            kwargs = dict(dim=embed_dims[i_layer],
                          input_resolution=(patches_resolution[0] // (2 ** i_layer),
                                            patches_resolution[1] // (2 ** i_layer)),
                          depth=depths[i_layer],
                          drop_path=dpr[sum(depths[:i_layer]):sum(depths[:i_layer + 1])],
                          downsample=PatchMerging if (
                              i_layer < self.num_layers - 1) else None,
                          use_checkpoint=use_checkpoint,
                          out_dim=embed_dims[min(
                              i_layer + 1, len(embed_dims) - 1)],
                          activation=activation,
                          )
            if i_layer == 0:
                # first stage is convolutional (MBConv blocks)
                layer = ConvLayer(
                    conv_expand_ratio=mbconv_expand_ratio,
                    **kwargs,
                )
            else:
                layer = BasicLayer(
                    num_heads=num_heads[i_layer],
                    window_size=window_sizes[i_layer],
                    mlp_ratio=self.mlp_ratio,
                    drop=drop_rate,
                    local_conv_size=local_conv_size,
                    **kwargs)
            self.layers.append(layer)
        # Classifier head
        self.norm_head = nn.LayerNorm(embed_dims[-1])
        self.head = nn.Linear(
            embed_dims[-1], num_classes) if num_classes > 0 else torch.nn.Identity()
        # init weights
        self.apply(self._init_weights)
        self.set_layer_lr_decay(layer_lr_decay)
    def set_layer_lr_decay(self, layer_lr_decay):
        """Tag every parameter with a `lr_scale` attribute decaying from the
        head backwards (block k gets decay_rate**(depth-k-1)); the optimizer
        is expected to read these attributes elsewhere."""
        decay_rate = layer_lr_decay
        # layers -> blocks (depth)
        depth = sum(self.depths)
        lr_scales = [decay_rate ** (depth - i - 1) for i in range(depth)]
        def _set_lr_scale(m, scale):
            for p in m.parameters():
                p.lr_scale = scale
        # patch embed shares the first (smallest) scale
        self.patch_embed.apply(lambda x: _set_lr_scale(x, lr_scales[0]))
        i = 0
        for layer in self.layers:
            for block in layer.blocks:
                block.apply(lambda x: _set_lr_scale(x, lr_scales[i]))
                i += 1
            # downsample shares the scale of its stage's last block
            if layer.downsample is not None:
                layer.downsample.apply(
                    lambda x: _set_lr_scale(x, lr_scales[i - 1]))
        assert i == depth
        # head gets the largest (undecayed) scale
        for m in [self.norm_head, self.head]:
            m.apply(lambda x: _set_lr_scale(x, lr_scales[-1]))
        for k, p in self.named_parameters():
            p.param_name = k
        def _check_lr_scale(m):
            for p in m.parameters():
                assert hasattr(p, 'lr_scale'), p.param_name
        self.apply(_check_lr_scale)
    def _init_weights(self, m):
        # truncated-normal linear weights, standard LayerNorm init
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
    @torch.jit.ignore
    def no_weight_decay_keywords(self):
        # relative-position bias tables are excluded from weight decay
        return {'attention_biases'}
    def forward_features(self, x):
        # x: (N, C, H, W)
        x = self.patch_embed(x)
        x = self.layers[0](x)
        start_i = 1
        for i in range(start_i, len(self.layers)):
            layer = self.layers[i]
            x = layer(x)
        # global average pool over tokens -> (N, C)
        x = x.mean(1)
        return x
    def forward(self, x):
        x = self.forward_features(x)
        x = self.norm_head(x)
        x = self.head(x)
        return x
# URL template for pretrained TinyViT checkpoints hosted on GitHub releases.
_checkpoint_url_format = \
    'https://github.com/wkcn/TinyViT-model-zoo/releases/download/checkpoints/{}.pth'
def _create_tiny_vit(variant, pretrained=False, **kwargs):
# pretrained_type: 22kto1k_distill, 1k, 22k_distill
pretrained_type = kwargs.pop('pretrained_type', '22kto1k_distill')
assert pretrained_type in ['22kto1k_distill', '1k', '22k_distill'], \
'pretrained_type should be one of 22kto1k_distill, 1k, 22k_distill'
img_size = kwargs.get('img_size', 224)
if img_size != 224:
pretrained_type = pretrained_type.replace('_', f'_{img_size}_')
num_classes_pretrained = 21841 if \
pretrained_type == '22k_distill' else 1000
variant_without_img_size = '_'.join(variant.split('_')[:-1])
cfg = dict(
url=_checkpoint_url_format.format(
f'{variant_without_img_size}_{pretrained_type}'),
num_classes=num_classes_pretrained,
classifier='head',
)
def _pretrained_filter_fn(state_dict):
state_dict = state_dict['model']
# filter out attention_bias_idxs
state_dict = {k: v for k, v in state_dict.items() if \
not k.endswith('attention_bias_idxs')}
return state_dict
if timm.__version__ >= "0.6":
return build_model_with_cfg(
TinyViT, variant, pretrained,
pretrained_cfg=cfg,
pretrained_filter_fn=_pretrained_filter_fn,
**kwargs)
else:
return build_model_with_cfg(
TinyViT, variant, pretrained,
default_cfg=cfg,
pretrained_filter_fn=_pretrained_filter_fn,
**kwargs)
@register_model
def tiny_vit_5m_224(pretrained=False, **kwargs):
    """TinyViT-5M at 224x224 resolution."""
    cfg = {
        'embed_dims': [64, 128, 160, 320],
        'depths': [2, 2, 6, 2],
        'num_heads': [2, 4, 5, 10],
        'window_sizes': [7, 7, 14, 7],
        'drop_path_rate': 0.0,
        **kwargs,  # caller overrides take precedence
    }
    return _create_tiny_vit('tiny_vit_5m_224', pretrained, **cfg)
@register_model
def tiny_vit_11m_224(pretrained=False, **kwargs):
    """TinyViT-11M at 224x224 resolution."""
    cfg = {
        'embed_dims': [64, 128, 256, 448],
        'depths': [2, 2, 6, 2],
        'num_heads': [2, 4, 8, 14],
        'window_sizes': [7, 7, 14, 7],
        'drop_path_rate': 0.1,
        **kwargs,  # caller overrides take precedence
    }
    return _create_tiny_vit('tiny_vit_11m_224', pretrained, **cfg)
@register_model
def tiny_vit_21m_224(pretrained=False, **kwargs):
    """TinyViT-21M at 224x224 resolution."""
    cfg = {
        'embed_dims': [96, 192, 384, 576],
        'depths': [2, 2, 6, 2],
        'num_heads': [3, 6, 12, 18],
        'window_sizes': [7, 7, 14, 7],
        'drop_path_rate': 0.2,
        **kwargs,  # caller overrides take precedence
    }
    return _create_tiny_vit('tiny_vit_21m_224', pretrained, **cfg)
@register_model
def tiny_vit_21m_384(pretrained=False, **kwargs):
    """TinyViT-21M at 384x384 resolution (larger windows)."""
    cfg = {
        'img_size': 384,
        'embed_dims': [96, 192, 384, 576],
        'depths': [2, 2, 6, 2],
        'num_heads': [3, 6, 12, 18],
        'window_sizes': [12, 12, 24, 12],
        'drop_path_rate': 0.1,
        **kwargs,  # caller overrides take precedence
    }
    return _create_tiny_vit('tiny_vit_21m_384', pretrained, **cfg)
@register_model
def tiny_vit_21m_512(pretrained=False, **kwargs):
    """TinyViT-21M at 512x512 resolution (larger windows)."""
    cfg = {
        'img_size': 512,
        'embed_dims': [96, 192, 384, 576],
        'depths': [2, 2, 6, 2],
        'num_heads': [3, 6, 12, 18],
        'window_sizes': [16, 16, 32, 16],
        'drop_path_rate': 0.1,
        **kwargs,  # caller overrides take precedence
    }
    return _create_tiny_vit('tiny_vit_21m_512', pretrained, **cfg)
|
Cream/TinyViT/models/tiny_vit.py/0
|
{
"file_path": "Cream/TinyViT/models/tiny_vit.py",
"repo_id": "Cream",
"token_count": 12438
}
| 297 |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Transforms and data augmentation for both image + bbox.
"""
import random
import PIL
import torch
import torchvision.transforms as T
import torchvision.transforms.functional as F
from util.box_ops import box_xyxy_to_cxcywh
from util.misc import interpolate
def crop(image, target, region):
    """Crop `image` to `region` = (top, left, height, width) and update `target`.

    Boxes are translated and clipped to the crop; masks are sliced; entries
    whose box or mask becomes empty are dropped from all annotation fields.
    """
    cropped_image = F.crop(image, *region)
    target = target.copy()
    i, j, h, w = region
    # should we do something wrt the original size?
    target["size"] = torch.tensor([h, w])
    # fields that must stay aligned when degenerate entries are removed below
    fields = ["labels", "area", "iscrowd"]
    if "boxes" in target:
        boxes = target["boxes"]
        max_size = torch.as_tensor([w, h], dtype=torch.float32)
        # shift into crop coordinates, then clip to [0, w] x [0, h]
        cropped_boxes = boxes - torch.as_tensor([j, i, j, i])
        cropped_boxes = torch.min(cropped_boxes.reshape(-1, 2, 2), max_size)
        cropped_boxes = cropped_boxes.clamp(min=0)
        area = (cropped_boxes[:, 1, :] - cropped_boxes[:, 0, :]).prod(dim=1)
        target["boxes"] = cropped_boxes.reshape(-1, 4)
        target["area"] = area
        fields.append("boxes")
    if "masks" in target:
        # FIXME should we update the area here if there are no boxes?
        target['masks'] = target['masks'][:, i:i + h, j:j + w]
        fields.append("masks")
    # remove elements for which the boxes or masks that have zero area
    if "boxes" in target or "masks" in target:
        # favor boxes selection when defining which elements to keep
        # this is compatible with previous implementation
        if "boxes" in target:
            cropped_boxes = target['boxes'].reshape(-1, 2, 2)
            keep = torch.all(cropped_boxes[:, 1, :] > cropped_boxes[:, 0, :], dim=1)
        else:
            keep = target['masks'].flatten(1).any(1)
        for field in fields:
            target[field] = target[field][keep]
    return cropped_image, target
def hflip(image, target):
    """Horizontally flip `image` and mirror boxes/masks in `target` accordingly."""
    flipped_image = F.hflip(image)
    width = image.size[0]
    target = target.copy()
    if "boxes" in target:
        # mirror x-coords: x' = w - x, swapping x_min/x_max to keep xyxy order
        reordered = target["boxes"][:, [2, 1, 0, 3]]
        target["boxes"] = reordered * torch.as_tensor([-1, 1, -1, 1]) \
            + torch.as_tensor([width, 0, width, 0])
    if "masks" in target:
        target['masks'] = target['masks'].flip(-1)
    return flipped_image, target
def resize(image, target, size, max_size=None):
    """Resize `image` (and scale `target` boxes/areas/masks) to `size`.

    `size` is either a scalar shorter-side length (aspect ratio preserved,
    longer side capped by `max_size`) or an explicit (w, h) pair.
    """
    # size can be min_size (scalar) or (w, h) tuple
    def get_size_with_aspect_ratio(image_size, size, max_size=None):
        w, h = image_size
        if max_size is not None:
            min_original_size = float(min((w, h)))
            max_original_size = float(max((w, h)))
            # shrink the requested shorter side so the longer side fits max_size
            if max_original_size / min_original_size * size > max_size:
                size = int(round(max_size * min_original_size / max_original_size))
        if (w <= h and w == size) or (h <= w and h == size):
            return (h, w)
        if w < h:
            ow = size
            oh = int(size * h / w)
        else:
            oh = size
            ow = int(size * w / h)
        return (oh, ow)
    def get_size(image_size, size, max_size=None):
        if isinstance(size, (list, tuple)):
            # explicit (w, h) -> torchvision's (h, w) convention
            return size[::-1]
        else:
            return get_size_with_aspect_ratio(image_size, size, max_size)
    size = get_size(image.size, size, max_size)
    rescaled_image = F.resize(image, size)
    if target is None:
        return rescaled_image, None
    # per-axis scale factors between the new and old image
    ratios = tuple(float(s) / float(s_orig) for s, s_orig in zip(rescaled_image.size, image.size))
    ratio_width, ratio_height = ratios
    target = target.copy()
    if "boxes" in target:
        boxes = target["boxes"]
        scaled_boxes = boxes * torch.as_tensor([ratio_width, ratio_height, ratio_width, ratio_height])
        target["boxes"] = scaled_boxes
    if "area" in target:
        area = target["area"]
        scaled_area = area * (ratio_width * ratio_height)
        target["area"] = scaled_area
    h, w = size
    target["size"] = torch.tensor([h, w])
    if "masks" in target:
        # nearest-neighbor resize, then re-binarize
        target['masks'] = interpolate(
            target['masks'][:, None].float(), size, mode="nearest")[:, 0] > 0.5
    return rescaled_image, target
def pad(image, target, padding):
    """Pad `image` on the bottom/right by `padding = (pad_x, pad_y)`; update `target`."""
    padded_image = F.pad(image, (0, 0, padding[0], padding[1]))
    if target is None:
        return padded_image, None
    target = target.copy()
    # PIL size is (width, height); target stores [height, width]
    target["size"] = torch.tensor(padded_image.size[::-1])
    if "masks" in target:
        target['masks'] = torch.nn.functional.pad(
            target['masks'], (0, padding[0], 0, padding[1]))
    return padded_image, target
class RandomCrop(object):
    """Crop a random region of fixed `size` from the image and its targets."""
    def __init__(self, size):
        self.size = size
    def __call__(self, img, target):
        return crop(img, target, T.RandomCrop.get_params(img, self.size))
class RandomSizeCrop(object):
    """Crop a region whose width/height are drawn from [min_size, max_size]."""
    def __init__(self, min_size: int, max_size: int):
        self.min_size = min_size
        self.max_size = max_size
    def __call__(self, img: PIL.Image.Image, target: dict):
        # width sampled first, then height (same RNG order as before)
        new_w = random.randint(self.min_size, min(img.width, self.max_size))
        new_h = random.randint(self.min_size, min(img.height, self.max_size))
        return crop(img, target, T.RandomCrop.get_params(img, [new_h, new_w]))
class CenterCrop(object):
    """Crop the central `size = (h, w)` region of the image."""
    def __init__(self, size):
        self.size = size
    def __call__(self, img, target):
        img_w, img_h = img.size
        out_h, out_w = self.size
        top = int(round((img_h - out_h) / 2.))
        left = int(round((img_w - out_w) / 2.))
        return crop(img, target, (top, left, out_h, out_w))
class RandomHorizontalFlip(object):
    """Apply `hflip` with probability `p`."""
    def __init__(self, p=0.5):
        self.p = p
    def __call__(self, img, target):
        flip = random.random() < self.p
        return hflip(img, target) if flip else (img, target)
class RandomResize(object):
    """Resize to a size drawn uniformly from `sizes`, capped by `max_size`."""
    def __init__(self, sizes, max_size=None):
        assert isinstance(sizes, (list, tuple))
        self.sizes = sizes
        self.max_size = max_size
    def __call__(self, img, target=None):
        return resize(img, target, random.choice(self.sizes), self.max_size)
class RandomPad(object):
    """Pad bottom/right by independent amounts drawn from [0, max_pad]."""
    def __init__(self, max_pad):
        self.max_pad = max_pad
    def __call__(self, img, target):
        # x amount sampled first, then y (same RNG order as before)
        amount_x = random.randint(0, self.max_pad)
        amount_y = random.randint(0, self.max_pad)
        return pad(img, target, (amount_x, amount_y))
class RandomSelect(object):
    """
    Randomly selects between transforms1 and transforms2,
    with probability p for transforms1 and (1 - p) for transforms2
    """
    def __init__(self, transforms1, transforms2, p=0.5):
        self.transforms1 = transforms1
        self.transforms2 = transforms2
        self.p = p
    def __call__(self, img, target):
        chosen = self.transforms1 if random.random() < self.p else self.transforms2
        return chosen(img, target)
class ToTensor(object):
    """Convert the PIL image to a tensor; pass the target through unchanged."""
    def __call__(self, img, target):
        return F.to_tensor(img), target
class RandomErasing(object):
    """Wrap torchvision's RandomErasing so it accepts (img, target) pairs."""
    def __init__(self, *args, **kwargs):
        self.eraser = T.RandomErasing(*args, **kwargs)
    def __call__(self, img, target):
        return self.eraser(img), target
class Normalize(object):
    """Normalize image channels; convert boxes to normalized cxcywh format."""
    def __init__(self, mean, std):
        self.mean = mean
        self.std = std
    def __call__(self, image, target=None):
        image = F.normalize(image, mean=self.mean, std=self.std)
        if target is None:
            return image, None
        target = target.copy()
        height, width = image.shape[-2:]
        if "boxes" in target:
            # xyxy -> cxcywh, then scale into [0, 1] by image size
            scale = torch.tensor([width, height, width, height], dtype=torch.float32)
            target["boxes"] = box_xyxy_to_cxcywh(target["boxes"]) / scale
        return image, target
class Compose(object):
    """Chain several (image, target) transforms, applying them in order."""
    def __init__(self, transforms):
        self.transforms = transforms
    def __call__(self, image, target):
        for transform in self.transforms:
            image, target = transform(image, target)
        return image, target
    def __repr__(self):
        lines = [self.__class__.__name__ + "("]
        for transform in self.transforms:
            lines.append("    {0}".format(transform))
        return "\n".join(lines) + "\n)"
|
Cream/iRPE/DETR-with-iRPE/datasets/transforms.py/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/datasets/transforms.py",
"repo_id": "Cream",
"token_count": 3666
}
| 298 |
#include <torch/extension.h>
#include <string>
#include <vector>
// Index type used for all tensor sizes/offsets in this extension.
using index_t = int;
// Gather relative-position-bias values: Y[b,h,q,k] = input[b,h,q, index[q,k]].
at::Tensor rpe_index_forward_cpu(torch::Tensor input, torch::Tensor index) {
  /*
  - Inputs
     input: float32 (B, H, L_query, num_buckets)
     index: index_t (L_query, L_key)
  - Outputs
     Y: float32 (B, H, L_query, L_key)
  */
  AT_ASSERTM(input.device().is_cpu(), "input must be a CPU tensor");
  AT_ASSERTM(index.device().is_cpu(), "index must be a CPU tensor");
  AT_ASSERTM(input.ndimension() == 4, "input must be a 4D tensor");
  AT_ASSERTM(index.ndimension() == 2, "index must be a 2D tensor");
  AT_ASSERTM(index.scalar_type() == at::kInt, "index must be Int type");
  const index_t B = input.size(0);
  const index_t H = input.size(1);
  const index_t num_buckets = input.size(3);
  const index_t L_query = index.size(0);
  const index_t L_key = index.size(1);
  const index_t L_qk = L_query * L_key;
  at::Tensor Y = at::empty({B, H, L_query, L_key}, input.options());
  auto input_ = input.contiguous();
  auto index_ = index.contiguous();
  // minimum per-thread work unit for at::parallel_for
  const index_t grain_size = 3000;
  const index_t numel = Y.numel();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      input.scalar_type(), "rpe_index_forward_cpu", [&] {
        const scalar_t *p_input = input_.data_ptr<scalar_t>();
        const index_t *p_index = index_.data_ptr<index_t>();
        scalar_t *p_Y = Y.data_ptr<scalar_t>();
        at::parallel_for(0, numel, grain_size, [&](index_t begin, index_t end) {
          /*
          // we optimize the following function to
          // reduce the number of operators, namely divide and multiply.
          for (index_t i = begin; i < end; ++i) {
            p_Y[i] = p_input[i / L_key * num_buckets + p_index[i % L_qk]];
          }
          */
          // Split [begin, end) into a ragged prologue/epilogue (handled with
          // the slow div/mod form) and an L_qk-aligned middle section where
          // the input base offset can be advanced incrementally instead.
          index_t aligned_begin = (begin + L_qk - 1) / L_qk * L_qk;
          if (aligned_begin > end) aligned_begin = end;
          index_t aligned_end = end / L_qk * L_qk;
          for (index_t i = begin; i < aligned_begin; ++i) {
            p_Y[i] = p_input[i / L_key * num_buckets + p_index[i % L_qk]];
          }
          // [aligned_begin, aligned_end)
          // where aligned_begin % L_qk == 0, aligned_end % L_qk == 0
          index_t base = aligned_begin / L_key * num_buckets;
          const index_t base_end = aligned_end / L_key * num_buckets;
          index_t i = aligned_begin;
          while (base < base_end) {
            // one full (L_query x L_key) tile per (b, h) pair
            for (index_t q = 0, j = 0; q < L_query; ++q) {
              for (index_t k = 0; k < L_key; ++k) {
                p_Y[i++] = p_input[base + p_index[j++]];
              }
              base += num_buckets;
            }
          }
          for (index_t i = aligned_end; i < end; ++i) {
            p_Y[i] = p_input[i / L_key * num_buckets + p_index[i % L_qk]];
          }
        });
      });
  return Y;
}
// Serialized add-accumulate used as the CPU counterpart of atomicAdd.
// NOTE(review): `#pragma omp critical` only provides mutual exclusion when
// this translation unit is compiled with OpenMP; under a non-OpenMP build the
// update is unprotected while at::parallel_for may still run multiple
// threads -- confirm the build flags.
template <typename scalar_t>
inline scalar_t cpuAtomicAdd(scalar_t *address, const scalar_t val) {
#pragma omp critical
  *address += val;
  return *address;
}
// Scatter-add the gradient of rpe_index_forward_cpu back into the bias table:
// grad_input[b,h,q, index[q,k]] += grad_output[b,h,q,k].
// NOTE(review): assumes `grad_input` is pre-zeroed by the caller -- the
// function only accumulates; confirm at the Python call site.
void rpe_index_backward_cpu(torch::Tensor grad_input, torch::Tensor grad_output,
                            torch::Tensor index) {
  /*
  - Inputs
     grad_output: float32 (B, H, L_query, L_key)
     index: index_t (L_query, L_key)
  - Outputs
     grad_input: float32 (B, H, L_query, num_buckets)
  */
  AT_ASSERTM(grad_input.device().is_cpu(), "grad_input must be a CPU tensor");
  AT_ASSERTM(grad_output.device().is_cpu(), "grad_output must be a CPU tensor");
  AT_ASSERTM(index.device().is_cpu(), "grad_index must be a CPU tensor");
  AT_ASSERTM(grad_input.ndimension() == 4, "input must be a 4D tensor");
  AT_ASSERTM(grad_output.ndimension() == 4, "input must be a 4D tensor");
  AT_ASSERTM(index.ndimension() == 2, "index must be a 2D tensor");
  AT_ASSERTM(index.scalar_type() == at::kInt, "index must be Int type");
  const index_t num_buckets = grad_input.size(3);
  const index_t L_query = index.size(0);
  const index_t L_key = index.size(1);
  const index_t L_qk = L_query * L_key;
  auto grad_input_ = grad_input.contiguous();
  auto grad_output_ = grad_output.contiguous();
  auto index_ = index.contiguous();
  // minimum per-thread work unit for at::parallel_for
  const index_t grain_size = 3000;
  const index_t numel = grad_output.numel();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      grad_input.scalar_type(), "rpe_index_backward_atomic_cpu", [&] {
        scalar_t *p_grad_input = grad_input_.data_ptr<scalar_t>();
        const index_t *p_index = index_.data_ptr<index_t>();
        const scalar_t *p_grad_output = grad_output_.data_ptr<scalar_t>();
        at::parallel_for(0, numel, grain_size, [&](index_t begin, index_t end) {
          for (index_t i = begin; i < end; ++i) {
            // map flat output index i to its bias-table slot; several i may
            // hit the same slot, hence the guarded accumulate
            const index_t input_i = i / L_key * num_buckets + p_index[i % L_qk];
            const scalar_t v = p_grad_output[i];
            cpuAtomicAdd(p_grad_input + input_i, v);
          }
        });
      });
}
// Report the version string of the `rpe_index_cpp` extension.
std::string version() {
  static const std::string kVersion{"1.2.0"};
  return kVersion;
}
// Python bindings for the RPE index operators. CPU kernels are always
// exported; GPU entry points are declared and exported only when the
// extension is built with CUDA support (WITH_CUDA).
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
  m.def("version", &version, "The version of the package `rpe_index_cpp`");
  m.def("forward_cpu", &rpe_index_forward_cpu, "2D RPE Index Forward (CPU)");
  m.def("backward_cpu", &rpe_index_backward_cpu, "2D RPE Index Backward (CPU)");
#if defined(WITH_CUDA)
  // Forward declarations of the CUDA kernels defined in the .cu translation
  // unit; declared here so this file needs no CUDA header at compile time.
  at::Tensor rpe_index_forward_gpu(torch::Tensor input, torch::Tensor index);
  void rpe_index_backward_gpu(torch::Tensor grad_input,
                              torch::Tensor grad_output, torch::Tensor index);
  m.def("forward_gpu", &rpe_index_forward_gpu, "2D RPE Index Forward (GPU)");
  m.def("backward_gpu", &rpe_index_backward_gpu, "2D RPE Index Backward (GPU)");
#endif
}
|
Cream/iRPE/DETR-with-iRPE/rpe_ops/rpe_index.cpp/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/rpe_ops/rpe_index.cpp",
"repo_id": "Cream",
"token_count": 2579
}
| 299 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
import pickle as pkl
import pprint
import time
import torch
import torch.nn.parallel
import torch.optim
from torch.utils.collect_env import get_pretty_env_info
from tensorboardX import SummaryWriter
import _init_paths
from config import config
from config import update_config
from core.function import test
from core.loss import build_criterion
from dataset import build_dataloader
from dataset import RealLabelsImagenet
from models import build_model
from utils.comm import comm
from utils.utils import create_logger
from utils.utils import init_distributed
from utils.utils import setup_cudnn
from utils.utils import summary_model_on_master
from utils.utils import strip_prefix_if_present
def parse_args():
    """Build the CLI parser for the classification test script and return
    the parsed argument namespace."""
    arg_parser = argparse.ArgumentParser(
        description='Test classification network')
    # Required experiment configuration file.
    arg_parser.add_argument('--cfg',
                            help='experiment configure file name',
                            required=True,
                            type=str)
    # distributed training
    arg_parser.add_argument("--local_rank", type=int, default=0)
    arg_parser.add_argument("--port", type=int, default=9000)
    # All remaining positional tokens are collected verbatim as config
    # overrides (consumed later by update_config).
    arg_parser.add_argument('opts',
                            help="Modify config options using the command-line",
                            default=None,
                            nargs=argparse.REMAINDER)
    return arg_parser.parse_args()
def main():
    """Entry point: load a trained classification model from config and run
    the evaluation loop on the validation set."""
    args = parse_args()
    # Presumably sets distributed attributes on args (args.distributed,
    # args.num_gpus are read below but not defined in parse_args) —
    # TODO confirm against utils.utils.init_distributed.
    init_distributed(args)
    # NOTE(review): setup_cudnn runs on `config` *before* update_config
    # applies --cfg/opts overrides — confirm cudnn flags are not meant to
    # come from the experiment file.
    setup_cudnn(config)
    update_config(config, args)
    final_output_dir = create_logger(config, args.cfg, 'test')
    tb_log_dir = final_output_dir
    if comm.is_main_process():
        # Log environment/config once from the main process only.
        logging.info("=> collecting env info (might take some time)")
        logging.info("\n" + get_pretty_env_info())
        logging.info(pprint.pformat(args))
        logging.info(config)
        logging.info("=> using {} GPUs".format(args.num_gpus))
        output_config_path = os.path.join(final_output_dir, 'config.yaml')
        logging.info("=> saving config into: {}".format(output_config_path))
    model = build_model(config)
    model.to(torch.device('cuda'))
    # Prefer an explicit TEST.MODEL_FILE; otherwise fall back to the best
    # checkpoint produced by training in the output directory.
    model_file = config.TEST.MODEL_FILE if config.TEST.MODEL_FILE \
        else os.path.join(final_output_dir, 'model_best.pth')
    logging.info('=> load model file: {}'.format(model_file))
    ext = model_file.split('.')[-1]
    if ext == 'pth':
        state_dict = torch.load(model_file, map_location="cpu")
    else:
        raise ValueError("Unknown model file")
    # strict=False tolerates missing/unexpected keys (e.g. head renames).
    model.load_state_dict(state_dict, strict=False)
    model.to(torch.device('cuda'))
    writer_dict = {
        'writer': SummaryWriter(logdir=tb_log_dir),
        'train_global_steps': 0,
        'valid_global_steps': 0,
    }
    summary_model_on_master(model, config, final_output_dir, False)
    if args.distributed:
        model = torch.nn.parallel.DistributedDataParallel(
            model, device_ids=[args.local_rank], output_device=args.local_rank
        )
    # define loss function (criterion) and optimizer
    criterion = build_criterion(config, train=False)
    criterion.cuda()
    valid_loader = build_dataloader(config, False, args.distributed)
    # Optional ImageNet "ReaL" relabeled ground truth (tsv format only).
    real_labels = None
    if (
        config.DATASET.DATASET == 'imagenet'
        and config.DATASET.DATA_FORMAT == 'tsv'
        and config.TEST.REAL_LABELS
    ):
        filenames = valid_loader.dataset.get_filenames()
        real_json = os.path.join(config.DATASET.ROOT, 'real.json')
        logging.info('=> loading real labels...')
        real_labels = RealLabelsImagenet(filenames, real_json)
    # Optional class-subset evaluation: the file lists one class id per
    # line; converted to a boolean mask over all NUM_CLASSES classes.
    valid_labels = None
    if config.TEST.VALID_LABELS:
        with open(config.TEST.VALID_LABELS, 'r') as f:
            valid_labels = {
                int(line.rstrip()) for line in f
            }
        valid_labels = [
            i in valid_labels for i in range(config.MODEL.NUM_CLASSES)
        ]
    logging.info('=> start testing')
    start = time.time()
    test(config, valid_loader, model, criterion,
         final_output_dir, tb_log_dir, writer_dict,
         args.distributed, real_labels=real_labels,
         valid_labels=valid_labels)
    logging.info('=> test duration time: {:.2f}s'.format(time.time()-start))
    writer_dict['writer'].close()
    logging.info('=> finish testing')
# Script entry point: run evaluation when executed directly.
if __name__ == '__main__':
    main()
|
CvT/tools/test.py/0
|
{
"file_path": "CvT/tools/test.py",
"repo_id": "CvT",
"token_count": 1823
}
| 300 |
# Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.microsoft.com.
When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [[email protected]](mailto:[email protected]) with any additional questions or comments.
Users can run SR by referring to the sample here
https://github.com/microsoft/anomalydetector/blob/master/main.py
This sample only RUN SR, for SR-CNN please refer the below section. Both SR and SR-CNN use the same evaluation in evaluate.py.
The SR-CNN project consists of three major parts.<br>
1.generate_data.py is used to preprocess the data, where the original continuous time series are split according to window size and artificial outliers are injected in proportion. <br>
`
python generate_data.py --data <dataset>
`<br>
where dataset is the file name of the data folder. If you want to change the default config, you can use the command line args:<br>
`
python generate_data.py -data <dataset> --window 256 --step 128
`<br>
2.train.py is the network training module of SR-CNN. SR transformer is applied on each time-series before training.<br>
`
python train.py -data <dataset>
`<br>
3.evalue.py is the evaluation module. As mentioned in our paper, <br>
`
We evaluate our model from three aspects: accuracy, efficiency and generality. We use precision, recall and F1-score to indicate the accuracy of our model. In real applications, the human operators do not care about the point-wise metrics. It is acceptable for an algorithm to trigger an alert for any point in a contiguous anomaly segment if the delay is not too long. Thus, we adopt the evaluation strategy following [23]. We mark the whole segment of continuous anomalies as a positive sample, which means that no matter how many anomalies have been detected in this segment, only one effective detection will be counted. If any point in an anomaly segment can be detected by the algorithm, and the delay of this point is no more than k from the start point of the anomaly segment, we say this segment is detected correctly. Thus, all points in this segment are treated as correct, and the points outside the anomaly segments are treated as normal.
`<br>
we set different delays to verify whether a whole section of anomalies can be detected in time. For example, When delay = 7, for an entire segment of anomaly, if the anomaly detector can issue an alarm at its first 7 points, it is considered that the entire segment of anomaly has been successfully detected, otherwise it is considered to have not been detected.<br>
Run the code:<br>
`
python evalue.py -data <dataset>
`<br>
|
anomalydetector/README.md/0
|
{
"file_path": "anomalydetector/README.md",
"repo_id": "anomalydetector",
"token_count": 804
}
| 301 |
/* Generated by Cython 0.29.16 */
/* BEGIN: Cython Metadata
{
"distutils": {
"define_macros": [
[
"CYTHON_TRACE",
"1"
]
],
"depends": [],
"name": "msanomalydetector._anomaly_kernel_cython",
"sources": [
"msanomalydetector/_anomaly_kernel_cython.pyx"
]
},
"module_name": "msanomalydetector._anomaly_kernel_cython"
}
END: Cython Metadata */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_16"
#define CYTHON_HEX_VERSION 0x001D10F0
#define CYTHON_FUTURE_DIVISION 0
#include <stddef.h>
#ifndef offsetof
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
#endif
#if !defined(WIN32) && !defined(MS_WINDOWS)
#ifndef __stdcall
#define __stdcall
#endif
#ifndef __cdecl
#define __cdecl
#endif
#ifndef __fastcall
#define __fastcall
#endif
#endif
#ifndef DL_IMPORT
#define DL_IMPORT(t) t
#endif
#ifndef DL_EXPORT
#define DL_EXPORT(t) t
#endif
#define __PYX_COMMA ,
#ifndef HAVE_LONG_LONG
#if PY_VERSION_HEX >= 0x02070000
#define HAVE_LONG_LONG
#endif
#endif
#ifndef PY_LONG_LONG
#define PY_LONG_LONG LONG_LONG
#endif
#ifndef Py_HUGE_VAL
#define Py_HUGE_VAL HUGE_VAL
#endif
#ifdef PYPY_VERSION
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#if PY_VERSION_HEX < 0x03050000
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#undef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 0
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#undef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 1
#undef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 0
#undef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 0
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif defined(PYSTON_VERSION)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
#define CYTHON_USE_PYTYPE_LOOKUP 1
#endif
#if PY_MAJOR_VERSION < 3
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#elif !defined(CYTHON_USE_PYLONG_INTERNALS)
#define CYTHON_USE_PYLONG_INTERNALS 1
#endif
#ifndef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 1
#endif
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#if PY_VERSION_HEX < 0x030300F0
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#elif !defined(CYTHON_USE_UNICODE_WRITER)
#define CYTHON_USE_UNICODE_WRITER 1
#endif
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#ifndef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 1
#endif
#ifndef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 1
#endif
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
#endif
#ifndef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
#endif
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
#ifndef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
#endif
#endif
#if !defined(CYTHON_FAST_PYCCALL)
#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#include "longintrepr.h"
#undef SHIFT
#undef BASE
#undef MASK
#ifdef SIZEOF_VOID_P
enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
#endif
#endif
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif
#ifndef CYTHON_RESTRICT
#if defined(__GNUC__)
#define CYTHON_RESTRICT __restrict__
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#define CYTHON_RESTRICT __restrict
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_RESTRICT restrict
#else
#define CYTHON_RESTRICT
#endif
#endif
#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
#endif
#ifndef CYTHON_MAYBE_UNUSED_VAR
# if defined(__cplusplus)
template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
# else
# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
# endif
#endif
#ifndef CYTHON_NCP_UNUSED
# if CYTHON_COMPILING_IN_CPYTHON
# define CYTHON_NCP_UNUSED
# else
# define CYTHON_NCP_UNUSED CYTHON_UNUSED
# endif
#endif
#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
#ifdef _MSC_VER
#ifndef _MSC_STDINT_H_
#if _MSC_VER < 1300
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
#else
typedef unsigned __int8 uint8_t;
typedef unsigned __int32 uint32_t;
#endif
#endif
#else
#include <stdint.h>
#endif
#ifndef CYTHON_FALLTHROUGH
#if defined(__cplusplus) && __cplusplus >= 201103L
#if __has_cpp_attribute(fallthrough)
#define CYTHON_FALLTHROUGH [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define CYTHON_FALLTHROUGH [[clang::fallthrough]]
#elif __has_cpp_attribute(gnu::fallthrough)
#define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
#endif
#endif
#ifndef CYTHON_FALLTHROUGH
#if __has_attribute(fallthrough)
#define CYTHON_FALLTHROUGH __attribute__((fallthrough))
#else
#define CYTHON_FALLTHROUGH
#endif
#endif
#if defined(__clang__ ) && defined(__apple_build_version__)
#if __apple_build_version__ < 7000000
#undef CYTHON_FALLTHROUGH
#define CYTHON_FALLTHROUGH
#endif
#endif
#endif
#ifndef CYTHON_INLINE
#if defined(__clang__)
#define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
#elif defined(__GNUC__)
#define CYTHON_INLINE __inline__
#elif defined(_MSC_VER)
#define CYTHON_INLINE __inline
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_INLINE inline
#else
#define CYTHON_INLINE
#endif
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
#define Py_OptimizeFlag 0
#endif
#define __PYX_BUILD_PY_SSIZE_T "n"
#define CYTHON_FORMAT_SSIZE_T "z"
#if PY_MAJOR_VERSION < 3
#define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#else
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#endif
#define __Pyx_DefaultClassType PyType_Type
#endif
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
#ifndef Py_TPFLAGS_HAVE_INDEX
#define Py_TPFLAGS_HAVE_INDEX 0
#endif
#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
#define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif
#ifndef Py_TPFLAGS_HAVE_FINALIZE
#define Py_TPFLAGS_HAVE_FINALIZE 0
#endif
#ifndef METH_STACKLESS
#define METH_STACKLESS 0
#endif
#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
#ifndef METH_FASTCALL
#define METH_FASTCALL 0x80
#endif
typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
Py_ssize_t nargs, PyObject *kwnames);
#else
#define __Pyx_PyCFunctionFast _PyCFunctionFast
#define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
#endif
#if CYTHON_FAST_PYCCALL
#define __Pyx_PyFastCFunction_Check(func)\
((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
#else
#define __Pyx_PyFastCFunction_Check(func) 0
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
#define PyObject_Malloc(s) PyMem_Malloc(s)
#define PyObject_Free(p) PyMem_Free(p)
#define PyObject_Realloc(p) PyMem_Realloc(p)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
#define PyMem_RawMalloc(n) PyMem_Malloc(n)
#define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
#define PyMem_RawFree(p) PyMem_Free(p)
#endif
#if CYTHON_COMPILING_IN_PYSTON
#define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
#else
#define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
#endif
#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#elif PY_VERSION_HEX >= 0x03060000
#define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
#elif PY_VERSION_HEX >= 0x03000000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#else
#define __Pyx_PyThreadState_Current _PyThreadState_Current
#endif
#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
#include "pythread.h"
/* Backfill of the CPython 3.7 thread-specific storage (TSS) API for older
   Pythons, implemented on top of the legacy PyThread_*_key calls.
   (Generated by Cython; comments added for readability only.) */
#define Py_tss_NEEDS_INIT 0
typedef int Py_tss_t;
/* Create a TSS key in *key; always reports success (returns 0). */
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
  *key = PyThread_create_key();
  return 0;
}
/* Heap-allocate a key pre-set to the "needs init" sentinel. */
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
  Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
  *key = Py_tss_NEEDS_INIT;
  return key;
}
/* Free a key obtained from PyThread_tss_alloc. */
static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
  PyObject_Free(key);
}
/* Non-zero once the key has been created. */
static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
  return *key != Py_tss_NEEDS_INIT;
}
/* Destroy the key and reset it to the "needs init" sentinel. */
static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
  PyThread_delete_key(*key);
  *key = Py_tss_NEEDS_INIT;
}
/* Store a per-thread value under the key. */
static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
  return PyThread_set_key_value(*key, value);
}
/* Fetch this thread's value for the key (NULL if unset). */
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
  return PyThread_get_key_value(*key);
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
#else
#define __Pyx_PyDict_NewPresized(n) PyDict_New()
#endif
#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
#else
#define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
#else
#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
#define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
#define PyUnicode_2BYTE_KIND 2
#define PyUnicode_4BYTE_KIND 4
#define __Pyx_PyUnicode_READY(op) (0)
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
#define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
#define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
#define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
#endif
#if CYTHON_COMPILING_IN_PYPY
#define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
#else
#define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
#define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
#define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
#define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
#endif
#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
#else
#define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
#endif
#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
#define PyObject_ASCII(o) PyObject_Repr(o)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBaseString_Type PyUnicode_Type
#define PyStringObject PyUnicodeObject
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
#else
#define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
#define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
#endif
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
#define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyIntObject PyLongObject
#define PyInt_Type PyLong_Type
#define PyInt_Check(op) PyLong_Check(op)
#define PyInt_CheckExact(op) PyLong_CheckExact(op)
#define PyInt_FromString PyLong_FromString
#define PyInt_FromUnicode PyLong_FromUnicode
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSize_t PyLong_FromSize_t
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#define PyInt_AS_LONG PyLong_AS_LONG
#define PyInt_AsSsize_t PyLong_AsSsize_t
#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
#define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
#define PyNumber_Int PyNumber_Long
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBoolObject PyLongObject
#endif
#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
#ifndef PyUnicode_InternFromString
#define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
#endif
#endif
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t PyInt_AsLong
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
#if CYTHON_USE_ASYNC_SLOTS
#if PY_VERSION_HEX >= 0x030500B1
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#else
#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
#endif
#else
#define __Pyx_PyType_AsAsync(obj) NULL
#endif
#ifndef __Pyx_PyAsyncMethodsStruct
typedef struct {
unaryfunc am_await;
unaryfunc am_aiter;
unaryfunc am_anext;
} __Pyx_PyAsyncMethodsStruct;
#endif
#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#endif
#include <math.h>
#ifdef NAN
#define __PYX_NAN() ((float) NAN)
#else
/* Fallback when <math.h> provides no NAN macro: an all-ones bit pattern
   is a quiet NaN under IEEE-754 single precision. */
static CYTHON_INLINE float __PYX_NAN() {
  float value;
  memset(&value, 0xFF, sizeof(value));
  return value;
}
#endif
#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
#define __Pyx_truncl trunc
#else
#define __Pyx_truncl truncl
#endif
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
#define __PYX_HAVE__msanomalydetector___anomaly_kernel_cython
#define __PYX_HAVE_API__msanomalydetector___anomaly_kernel_cython
/* Early includes */
#include <string.h>
#include <stdio.h>
#include "numpy/arrayobject.h"
#include "numpy/ufuncobject.h"
#include "pythread.h"
#include <stdlib.h>
#include "pystate.h"
#ifdef _OPENMP
#include <omp.h>
#endif /* _OPENMP */
#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
#define CYTHON_WITHOUT_ASSERTIONS
#endif
typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
#define __PYX_DEFAULT_STRING_ENCODING ""
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#define __Pyx_uchar_cast(c) ((unsigned char)c)
#define __Pyx_long_cast(x) ((long)x)
#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
(sizeof(type) < sizeof(Py_ssize_t)) ||\
(sizeof(type) > sizeof(Py_ssize_t) &&\
likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX) &&\
(!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
v == (type)PY_SSIZE_T_MIN))) ||\
(sizeof(type) == sizeof(Py_ssize_t) &&\
(is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX))) )
/* Bounds check 0 <= i < limit with a single unsigned comparison: a
   negative i wraps to a huge size_t and fails the test. */
static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
    return (size_t) i < (size_t) limit;
}
#if defined (__cplusplus) && __cplusplus >= 201103L
#include <cstdlib>
#define __Pyx_sst_abs(value) std::abs(value)
#elif SIZEOF_INT >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) abs(value)
#elif SIZEOF_LONG >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) labs(value)
#elif defined (_MSC_VER)
#define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define __Pyx_sst_abs(value) llabs(value)
#elif defined (__GNUC__)
#define __Pyx_sst_abs(value) __builtin_llabs(value)
#else
#define __Pyx_sst_abs(value) ((value<0) ? -value : value)
#endif
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
#define __Pyx_PyBytes_FromString PyBytes_FromString
#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
#if PY_MAJOR_VERSION < 3
#define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#else
#define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
#endif
#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
/* Count the Py_UNICODE code units in the NUL-terminated buffer *u,
 * excluding the terminating NUL itself (wcslen-style semantics). */
static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
    size_t count = 0;
    while (u[count] != 0)
        ++count;
    return count;
}
#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
#define __Pyx_PySequence_Tuple(obj)\
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
#if CYTHON_ASSUME_SAFE_MACROS
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
#endif
#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
#else
#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
#endif
#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
/* Flag set at module init: non-zero when sys.getdefaultencoding() is
 * something other than "ascii" (Python 2 / c_string_encoding=ascii builds). */
static int __Pyx_sys_getdefaultencoding_not_ascii;
/* One-time init: query sys.getdefaultencoding() and verify the default
 * encoding is a superset of ASCII (every byte 0..127 round-trips).
 * Returns 0 on success, -1 with a Python exception set on failure. */
static int __Pyx_init_sys_getdefaultencoding_params(void) {
    PyObject* sys;
    PyObject* default_encoding = NULL;
    PyObject* ascii_chars_u = NULL;
    PyObject* ascii_chars_b = NULL;
    const char* default_encoding_c;
    sys = PyImport_ImportModule("sys");
    if (!sys) goto bad;
    default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
    Py_DECREF(sys);
    if (!default_encoding) goto bad;
    default_encoding_c = PyBytes_AsString(default_encoding);
    if (!default_encoding_c) goto bad;
    if (strcmp(default_encoding_c, "ascii") == 0) {
        __Pyx_sys_getdefaultencoding_not_ascii = 0;
    } else {
        char ascii_chars[128];
        int c;
        for (c = 0; c < 128; c++) {
            ascii_chars[c] = c;
        }
        __Pyx_sys_getdefaultencoding_not_ascii = 1;
        /* Decode bytes 0..127 as ASCII, then re-encode with the default
         * encoding; a byte-for-byte match proves ASCII compatibility. */
        ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
        if (!ascii_chars_u) goto bad;
        ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
        if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
            PyErr_Format(
                PyExc_ValueError,
                "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
                default_encoding_c);
            goto bad;
        }
        Py_DECREF(ascii_chars_u);
        Py_DECREF(ascii_chars_b);
    }
    Py_DECREF(default_encoding);
    return 0;
bad:
    /* Error path: release whatever references were acquired before failing. */
    Py_XDECREF(default_encoding);
    Py_XDECREF(ascii_chars_u);
    Py_XDECREF(ascii_chars_b);
    return -1;
}
#endif
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
#else
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
/* Cached copy of sys.getdefaultencoding(), heap-allocated once at module
 * init and never freed — it is intended to live for the process lifetime. */
static char* __PYX_DEFAULT_STRING_ENCODING;
/* One-time init: fetch sys.getdefaultencoding() and stash a private C copy
 * in __PYX_DEFAULT_STRING_ENCODING. Returns 0 on success, -1 on failure
 * (NOTE(review): on malloc failure no Python exception is set — callers
 * presumably treat -1 as fatal regardless; confirm against caller). */
static int __Pyx_init_sys_getdefaultencoding_params(void) {
    PyObject* sys;
    PyObject* default_encoding = NULL;
    char* default_encoding_c;
    sys = PyImport_ImportModule("sys");
    if (!sys) goto bad;
    default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
    Py_DECREF(sys);
    if (!default_encoding) goto bad;
    default_encoding_c = PyBytes_AsString(default_encoding);
    if (!default_encoding_c) goto bad;
    /* Copy the encoding name out of the PyBytes object so the C string
     * remains valid after the object is released below. */
    __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
    if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
    strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
    Py_DECREF(default_encoding);
    return 0;
bad:
    Py_XDECREF(default_encoding);
    return -1;
}
#endif
#endif
/* Test for GCC > 2.95 */
#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* !__GNUC__ or GCC < 2.95 */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
/* No-op that takes a variable's address; presumably used to silence
 * compiler "may be used uninitialized" warnings — the value is untouched. */
static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
static PyObject *__pyx_m = NULL;
static PyObject *__pyx_d;
static PyObject *__pyx_b;
static PyObject *__pyx_cython_runtime = NULL;
static PyObject *__pyx_empty_tuple;
static PyObject *__pyx_empty_bytes;
static PyObject *__pyx_empty_unicode;
static int __pyx_lineno;
static int __pyx_clineno = 0;
static const char * __pyx_cfilenm= __FILE__;
static const char *__pyx_filename;
/* Header.proto */
#if !defined(CYTHON_CCOMPLEX)
#if defined(__cplusplus)
#define CYTHON_CCOMPLEX 1
#elif defined(_Complex_I)
#define CYTHON_CCOMPLEX 1
#else
#define CYTHON_CCOMPLEX 0
#endif
#endif
#if CYTHON_CCOMPLEX
#ifdef __cplusplus
#include <complex>
#else
#include <complex.h>
#endif
#endif
#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__)
#undef _Complex_I
#define _Complex_I 1.0fj
#endif
static const char *__pyx_f[] = {
"msanomalydetector\\_anomaly_kernel_cython.pyx",
"__init__.pxd",
"stringsource",
"type.pxd",
};
/* MemviewSliceStruct.proto */
struct __pyx_memoryview_obj;
/* C-level view of a typed memoryview slice (up to 8 dimensions,
 * per the fixed array sizes below). */
typedef struct {
  struct __pyx_memoryview_obj *memview; /* owning memoryview object */
  char *data;                           /* pointer to the slice's first element */
  Py_ssize_t shape[8];                  /* extent per dimension */
  Py_ssize_t strides[8];                /* byte stride per dimension */
  Py_ssize_t suboffsets[8];             /* per-dimension suboffsets — presumably PEP 3118 convention; verify */
} __Pyx_memviewslice;
#define __Pyx_MemoryView_Len(m) (m.shape[0])
/* Atomics.proto */
#include <pythread.h>
/* Select an atomic increment/decrement implementation for memoryview
 * refcounting; falls back to lock-based counting when no compiler
 * intrinsic is available (CYTHON_ATOMICS forced to 0). */
#ifndef CYTHON_ATOMICS
    #define CYTHON_ATOMICS 1
#endif
#define __pyx_atomic_int_type int
/* GCC >= 4.1.2 ships the __sync_* builtins (not usable on plain __i386__).
 * FIX: the original tested the undefined macro __GNUC_PATCHLEVEL (missing
 * trailing underscores), which the preprocessor evaluates as 0, so the
 * GCC 4.1.x patchlevel check could never pass; the standard predefined
 * macro is __GNUC_PATCHLEVEL__. */
#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 ||\
                    (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2)) &&\
                    !defined(__i386__)
    #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1)
    #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1)
    #ifdef __PYX_DEBUG_ATOMICS
        #warning "Using GNU atomics"
    #endif
#elif CYTHON_ATOMICS && defined(_MSC_VER) && 0
    /* Disabled branch (&& 0): MSVC Interlocked intrinsics, kept for reference. */
    #include <Windows.h>
    #undef __pyx_atomic_int_type
    #define __pyx_atomic_int_type LONG
    #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value)
    #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value)
    #ifdef __PYX_DEBUG_ATOMICS
        #pragma message ("Using MSVC atomics")
    #endif
#elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0
    /* Disabled branch (&& 0): Intel compiler intrinsics, kept for reference. */
    #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value)
    #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value)
    #ifdef __PYX_DEBUG_ATOMICS
        #warning "Using Intel atomics"
    #endif
#else
    #undef CYTHON_ATOMICS
    #define CYTHON_ATOMICS 0
    #ifdef __PYX_DEBUG_ATOMICS
        #warning "Not using atomics"
    #endif
#endif
typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
#if CYTHON_ATOMICS
#define __pyx_add_acquisition_count(memview)\
__pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
#define __pyx_sub_acquisition_count(memview)\
__pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
#else
#define __pyx_add_acquisition_count(memview)\
__pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
#define __pyx_sub_acquisition_count(memview)\
__pyx_sub_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
#endif
/* ForceInitThreads.proto */
#ifndef __PYX_FORCE_INIT_THREADS
#define __PYX_FORCE_INIT_THREADS 0
#endif
/* NoFastGil.proto */
#define __Pyx_PyGILState_Ensure PyGILState_Ensure
#define __Pyx_PyGILState_Release PyGILState_Release
#define __Pyx_FastGIL_Remember()
#define __Pyx_FastGIL_Forget()
#define __Pyx_FastGilFuncInit()
/* BufferFormatStructs.proto */
#define IS_UNSIGNED(type) (((type) -1) > 0)
struct __Pyx_StructField_;
#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0)
typedef struct {
const char* name;
struct __Pyx_StructField_* fields;
size_t size;
size_t arraysize[8];
int ndim;
char typegroup;
char is_unsigned;
int flags;
} __Pyx_TypeInfo;
typedef struct __Pyx_StructField_ {
__Pyx_TypeInfo* type;
const char* name;
size_t offset;
} __Pyx_StructField;
typedef struct {
__Pyx_StructField* field;
size_t parent_offset;
} __Pyx_BufFmt_StackElem;
typedef struct {
__Pyx_StructField root;
__Pyx_BufFmt_StackElem* head;
size_t fmt_offset;
size_t new_count, enc_count;
size_t struct_alignment;
int is_complex;
char enc_type;
char new_packmode;
char enc_packmode;
char is_valid_array;
} __Pyx_BufFmt_Context;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":776
* # in Cython to enable them only on the right systems.
*
* ctypedef npy_int8 int8_t # <<<<<<<<<<<<<<
* ctypedef npy_int16 int16_t
* ctypedef npy_int32 int32_t
*/
typedef npy_int8 __pyx_t_5numpy_int8_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":777
*
* ctypedef npy_int8 int8_t
* ctypedef npy_int16 int16_t # <<<<<<<<<<<<<<
* ctypedef npy_int32 int32_t
* ctypedef npy_int64 int64_t
*/
typedef npy_int16 __pyx_t_5numpy_int16_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":778
* ctypedef npy_int8 int8_t
* ctypedef npy_int16 int16_t
* ctypedef npy_int32 int32_t # <<<<<<<<<<<<<<
* ctypedef npy_int64 int64_t
* #ctypedef npy_int96 int96_t
*/
typedef npy_int32 __pyx_t_5numpy_int32_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":779
* ctypedef npy_int16 int16_t
* ctypedef npy_int32 int32_t
* ctypedef npy_int64 int64_t # <<<<<<<<<<<<<<
* #ctypedef npy_int96 int96_t
* #ctypedef npy_int128 int128_t
*/
typedef npy_int64 __pyx_t_5numpy_int64_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":783
* #ctypedef npy_int128 int128_t
*
* ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<<
* ctypedef npy_uint16 uint16_t
* ctypedef npy_uint32 uint32_t
*/
typedef npy_uint8 __pyx_t_5numpy_uint8_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":784
*
* ctypedef npy_uint8 uint8_t
* ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<<
* ctypedef npy_uint32 uint32_t
* ctypedef npy_uint64 uint64_t
*/
typedef npy_uint16 __pyx_t_5numpy_uint16_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":785
* ctypedef npy_uint8 uint8_t
* ctypedef npy_uint16 uint16_t
* ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<<
* ctypedef npy_uint64 uint64_t
* #ctypedef npy_uint96 uint96_t
*/
typedef npy_uint32 __pyx_t_5numpy_uint32_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":786
* ctypedef npy_uint16 uint16_t
* ctypedef npy_uint32 uint32_t
* ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<<
* #ctypedef npy_uint96 uint96_t
* #ctypedef npy_uint128 uint128_t
*/
typedef npy_uint64 __pyx_t_5numpy_uint64_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":790
* #ctypedef npy_uint128 uint128_t
*
* ctypedef npy_float32 float32_t # <<<<<<<<<<<<<<
* ctypedef npy_float64 float64_t
* #ctypedef npy_float80 float80_t
*/
typedef npy_float32 __pyx_t_5numpy_float32_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":791
*
* ctypedef npy_float32 float32_t
* ctypedef npy_float64 float64_t # <<<<<<<<<<<<<<
* #ctypedef npy_float80 float80_t
* #ctypedef npy_float128 float128_t
*/
typedef npy_float64 __pyx_t_5numpy_float64_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":800
* # The int types are mapped a bit surprising --
* # numpy.int corresponds to 'l' and numpy.long to 'q'
* ctypedef npy_long int_t # <<<<<<<<<<<<<<
* ctypedef npy_longlong long_t
* ctypedef npy_longlong longlong_t
*/
typedef npy_long __pyx_t_5numpy_int_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":801
* # numpy.int corresponds to 'l' and numpy.long to 'q'
* ctypedef npy_long int_t
* ctypedef npy_longlong long_t # <<<<<<<<<<<<<<
* ctypedef npy_longlong longlong_t
*
*/
typedef npy_longlong __pyx_t_5numpy_long_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":802
* ctypedef npy_long int_t
* ctypedef npy_longlong long_t
* ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<<
*
* ctypedef npy_ulong uint_t
*/
typedef npy_longlong __pyx_t_5numpy_longlong_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":804
* ctypedef npy_longlong longlong_t
*
* ctypedef npy_ulong uint_t # <<<<<<<<<<<<<<
* ctypedef npy_ulonglong ulong_t
* ctypedef npy_ulonglong ulonglong_t
*/
typedef npy_ulong __pyx_t_5numpy_uint_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":805
*
* ctypedef npy_ulong uint_t
* ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<<
* ctypedef npy_ulonglong ulonglong_t
*
*/
typedef npy_ulonglong __pyx_t_5numpy_ulong_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":806
* ctypedef npy_ulong uint_t
* ctypedef npy_ulonglong ulong_t
* ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<<
*
* ctypedef npy_intp intp_t
*/
typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":808
* ctypedef npy_ulonglong ulonglong_t
*
* ctypedef npy_intp intp_t # <<<<<<<<<<<<<<
* ctypedef npy_uintp uintp_t
*
*/
typedef npy_intp __pyx_t_5numpy_intp_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":809
*
* ctypedef npy_intp intp_t
* ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<<
*
* ctypedef npy_double float_t
*/
typedef npy_uintp __pyx_t_5numpy_uintp_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":811
* ctypedef npy_uintp uintp_t
*
* ctypedef npy_double float_t # <<<<<<<<<<<<<<
* ctypedef npy_double double_t
* ctypedef npy_longdouble longdouble_t
*/
typedef npy_double __pyx_t_5numpy_float_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":812
*
* ctypedef npy_double float_t
* ctypedef npy_double double_t # <<<<<<<<<<<<<<
* ctypedef npy_longdouble longdouble_t
*
*/
typedef npy_double __pyx_t_5numpy_double_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":813
* ctypedef npy_double float_t
* ctypedef npy_double double_t
* ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<<
*
* ctypedef npy_cfloat cfloat_t
*/
typedef npy_longdouble __pyx_t_5numpy_longdouble_t;
/* Declarations.proto */
#if CYTHON_CCOMPLEX
#ifdef __cplusplus
typedef ::std::complex< float > __pyx_t_float_complex;
#else
typedef float _Complex __pyx_t_float_complex;
#endif
#else
typedef struct { float real, imag; } __pyx_t_float_complex;
#endif
static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float);
/* Declarations.proto */
#if CYTHON_CCOMPLEX
#ifdef __cplusplus
typedef ::std::complex< double > __pyx_t_double_complex;
#else
typedef double _Complex __pyx_t_double_complex;
#endif
#else
typedef struct { double real, imag; } __pyx_t_double_complex;
#endif
static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double);
/*--- Type declarations ---*/
struct __pyx_array_obj;
struct __pyx_MemviewEnum_obj;
struct __pyx_memoryview_obj;
struct __pyx_memoryviewslice_obj;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":815
* ctypedef npy_longdouble longdouble_t
*
* ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<<
* ctypedef npy_cdouble cdouble_t
* ctypedef npy_clongdouble clongdouble_t
*/
typedef npy_cfloat __pyx_t_5numpy_cfloat_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":816
*
* ctypedef npy_cfloat cfloat_t
* ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<<
* ctypedef npy_clongdouble clongdouble_t
*
*/
typedef npy_cdouble __pyx_t_5numpy_cdouble_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":817
* ctypedef npy_cfloat cfloat_t
* ctypedef npy_cdouble cdouble_t
* ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<<
*
* ctypedef npy_cdouble complex_t
*/
typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":819
* ctypedef npy_clongdouble clongdouble_t
*
* ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<<
*
* cdef inline object PyArray_MultiIterNew1(a):
*/
typedef npy_cdouble __pyx_t_5numpy_complex_t;
struct __pyx_opt_args_17msanomalydetector_22_anomaly_kernel_cython_median_filter;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":18
* return (data[mid - 1] + data[mid])/2
*
* cpdef median_filter(np.ndarray data, int window, bint need_two_end=False): # <<<<<<<<<<<<<<
* cdef int w_len = window // 2 * 2 + 1
* cdef int t_len = len(data)
*/
struct __pyx_opt_args_17msanomalydetector_22_anomaly_kernel_cython_median_filter {
int __pyx_n;
int need_two_end;
};
/* "View.MemoryView":105
*
* @cname("__pyx_array")
* cdef class array: # <<<<<<<<<<<<<<
*
* cdef:
*/
struct __pyx_array_obj {
PyObject_HEAD
struct __pyx_vtabstruct_array *__pyx_vtab;
char *data;
Py_ssize_t len;
char *format;
int ndim;
Py_ssize_t *_shape;
Py_ssize_t *_strides;
Py_ssize_t itemsize;
PyObject *mode;
PyObject *_format;
void (*callback_free_data)(void *);
int free_data;
int dtype_is_object;
};
/* "View.MemoryView":279
*
* @cname('__pyx_MemviewEnum')
* cdef class Enum(object): # <<<<<<<<<<<<<<
* cdef object name
* def __init__(self, name):
*/
struct __pyx_MemviewEnum_obj {
PyObject_HEAD
PyObject *name;
};
/* "View.MemoryView":330
*
* @cname('__pyx_memoryview')
* cdef class memoryview(object): # <<<<<<<<<<<<<<
*
* cdef object obj
*/
struct __pyx_memoryview_obj {
PyObject_HEAD
struct __pyx_vtabstruct_memoryview *__pyx_vtab;
PyObject *obj;
PyObject *_size;
PyObject *_array_interface;
PyThread_type_lock lock;
__pyx_atomic_int acquisition_count[2];
__pyx_atomic_int *acquisition_count_aligned_p;
Py_buffer view;
int flags;
int dtype_is_object;
__Pyx_TypeInfo *typeinfo;
};
/* "View.MemoryView":965
*
* @cname('__pyx_memoryviewslice')
* cdef class _memoryviewslice(memoryview): # <<<<<<<<<<<<<<
* "Internal class for passing memoryview slices to Python"
*
*/
struct __pyx_memoryviewslice_obj {
struct __pyx_memoryview_obj __pyx_base;
__Pyx_memviewslice from_slice;
PyObject *from_object;
PyObject *(*to_object_func)(char *);
int (*to_dtype_func)(char *, PyObject *);
};
/* "View.MemoryView":105
*
* @cname("__pyx_array")
* cdef class array: # <<<<<<<<<<<<<<
*
* cdef:
*/
struct __pyx_vtabstruct_array {
PyObject *(*get_memview)(struct __pyx_array_obj *);
};
static struct __pyx_vtabstruct_array *__pyx_vtabptr_array;
/* "View.MemoryView":330
*
* @cname('__pyx_memoryview')
* cdef class memoryview(object): # <<<<<<<<<<<<<<
*
* cdef object obj
*/
struct __pyx_vtabstruct_memoryview {
char *(*get_item_pointer)(struct __pyx_memoryview_obj *, PyObject *);
PyObject *(*is_slice)(struct __pyx_memoryview_obj *, PyObject *);
PyObject *(*setitem_slice_assignment)(struct __pyx_memoryview_obj *, PyObject *, PyObject *);
PyObject *(*setitem_slice_assign_scalar)(struct __pyx_memoryview_obj *, struct __pyx_memoryview_obj *, PyObject *);
PyObject *(*setitem_indexed)(struct __pyx_memoryview_obj *, PyObject *, PyObject *);
PyObject *(*convert_item_to_object)(struct __pyx_memoryview_obj *, char *);
PyObject *(*assign_item_from_object)(struct __pyx_memoryview_obj *, char *, PyObject *);
};
static struct __pyx_vtabstruct_memoryview *__pyx_vtabptr_memoryview;
/* "View.MemoryView":965
*
* @cname('__pyx_memoryviewslice')
* cdef class _memoryviewslice(memoryview): # <<<<<<<<<<<<<<
* "Internal class for passing memoryview slices to Python"
*
*/
struct __pyx_vtabstruct__memoryviewslice {
struct __pyx_vtabstruct_memoryview __pyx_base;
};
static struct __pyx_vtabstruct__memoryviewslice *__pyx_vtabptr__memoryviewslice;
/* --- Runtime support code (head) --- */
/* Refnanny.proto */
#ifndef CYTHON_REFNANNY
#define CYTHON_REFNANNY 0
#endif
#if CYTHON_REFNANNY
typedef struct {
void (*INCREF)(void*, PyObject*, int);
void (*DECREF)(void*, PyObject*, int);
void (*GOTREF)(void*, PyObject*, int);
void (*GIVEREF)(void*, PyObject*, int);
void* (*SetupContext)(const char*, int, const char*);
void (*FinishContext)(void**);
} __Pyx_RefNannyAPIStruct;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
#define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
#ifdef WITH_THREAD
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
if (acquire_gil) {\
PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
PyGILState_Release(__pyx_gilstate_save);\
} else {\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
}
#else
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
#endif
#define __Pyx_RefNannyFinishContext()\
__Pyx_RefNanny->FinishContext(&__pyx_refnanny)
#define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
#define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
#else
#define __Pyx_RefNannyDeclarations
#define __Pyx_RefNannySetupContext(name, acquire_gil)
#define __Pyx_RefNannyFinishContext()
#define __Pyx_INCREF(r) Py_INCREF(r)
#define __Pyx_DECREF(r) Py_DECREF(r)
#define __Pyx_GOTREF(r)
#define __Pyx_GIVEREF(r)
#define __Pyx_XINCREF(r) Py_XINCREF(r)
#define __Pyx_XDECREF(r) Py_XDECREF(r)
#define __Pyx_XGOTREF(r)
#define __Pyx_XGIVEREF(r)
#endif
#define __Pyx_XDECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_XDECREF(tmp);\
} while (0)
#define __Pyx_DECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_DECREF(tmp);\
} while (0)
#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
/* PyObjectGetAttrStr.proto */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
#endif
/* GetBuiltinName.proto */
static PyObject *__Pyx_GetBuiltinName(PyObject *name);
/* PyObjectCall.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);
#else
#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
#endif
/* PyThreadStateGet.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
#else
#define __Pyx_PyThreadState_declare
#define __Pyx_PyThreadState_assign
#define __Pyx_PyErr_Occurred() PyErr_Occurred()
#endif
/* PyErrFetchRestore.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
#else
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#endif
#else
#define __Pyx_PyErr_Clear() PyErr_Clear()
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
#endif
/* RaiseException.proto */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
/* None.proto */
static CYTHON_INLINE long __Pyx_mod_long(long, long);
/* None.proto */
static CYTHON_INLINE long __Pyx_div_long(long, long);
/* BufferIndexError.proto */
static void __Pyx_RaiseBufferIndexError(int axis);
/* WriteUnraisableException.proto */
static void __Pyx_WriteUnraisable(const char *name, int clineno,
int lineno, const char *filename,
int full_traceback, int nogil);
/* RaiseArgTupleInvalid.proto */
static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
/* RaiseDoubleKeywords.proto */
static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
/* ParseKeywords.proto */
static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
const char* function_name);
/* None.proto */
static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname);
/* MemviewSliceInit.proto */
#define __Pyx_BUF_MAX_NDIMS %(BUF_MAX_NDIMS)d
#define __Pyx_MEMVIEW_DIRECT 1
#define __Pyx_MEMVIEW_PTR 2
#define __Pyx_MEMVIEW_FULL 4
#define __Pyx_MEMVIEW_CONTIG 8
#define __Pyx_MEMVIEW_STRIDED 16
#define __Pyx_MEMVIEW_FOLLOW 32
#define __Pyx_IS_C_CONTIG 1
#define __Pyx_IS_F_CONTIG 2
static int __Pyx_init_memviewslice(
struct __pyx_memoryview_obj *memview,
int ndim,
__Pyx_memviewslice *memviewslice,
int memview_is_new_reference);
static CYTHON_INLINE int __pyx_add_acquisition_count_locked(
__pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
static CYTHON_INLINE int __pyx_sub_acquisition_count_locked(
__pyx_atomic_int *acquisition_count, PyThread_type_lock lock);
#define __pyx_get_slice_count_pointer(memview) (memview->acquisition_count_aligned_p)
#define __pyx_get_slice_count(memview) (*__pyx_get_slice_count_pointer(memview))
#define __PYX_INC_MEMVIEW(slice, have_gil) __Pyx_INC_MEMVIEW(slice, have_gil, __LINE__)
#define __PYX_XDEC_MEMVIEW(slice, have_gil) __Pyx_XDEC_MEMVIEW(slice, have_gil, __LINE__)
static CYTHON_INLINE void __Pyx_INC_MEMVIEW(__Pyx_memviewslice *, int, int);
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW(__Pyx_memviewslice *, int, int);
/* PyDictVersioning.proto */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
(version_var) = __PYX_GET_DICT_VERSION(dict);\
(cache_var) = (value);
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
(VAR) = __pyx_dict_cached_value;\
} else {\
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
}\
}
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
#else
#define __PYX_GET_DICT_VERSION(dict) (0)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
#endif
/* GetModuleGlobalName.proto */
#if CYTHON_USE_DICT_VERSIONS
#define __Pyx_GetModuleGlobalName(var, name) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
(var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\
__Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
#define __Pyx_GetModuleGlobalNameUncached(var, name) {\
PY_UINT64_T __pyx_dict_version;\
PyObject *__pyx_dict_cached_value;\
(var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);
#else
#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name)
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
#endif
/* ListCompAppend.proto */
#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS
/* Fast-path append for list comprehensions: when the list already has spare
 * capacity, store the item directly (stealing no reference — we INCREF) and
 * bump ob_size; otherwise fall back to PyList_Append.
 * Returns 0 on success, -1 on failure (PyList_Append convention). */
static CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {
    PyListObject* L = (PyListObject*) list;
    Py_ssize_t len = Py_SIZE(list);
    if (likely(L->allocated > len)) {
        Py_INCREF(x);
        PyList_SET_ITEM(list, len, x);
        /* FIX: `Py_SIZE(list) = len+1;` no longer compiles on CPython >= 3.9,
         * where Py_SIZE() stopped being an assignable lvalue; use
         * Py_SET_SIZE (available since 3.9.0a4) there. */
#if PY_VERSION_HEX >= 0x030900A4
        Py_SET_SIZE(list, len + 1);
#else
        Py_SIZE(list) = len + 1;
#endif
        return 0;
    }
    return PyList_Append(list, x);
}
#else
#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x)
#endif
/* PyFunctionFastCall.proto */
#if CYTHON_FAST_PYCALL
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
#if 1 || PY_VERSION_HEX < 0x030600B1
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
#else
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
#endif
#define __Pyx_BUILD_ASSERT_EXPR(cond)\
(sizeof(char [1 - 2*!(cond)]) - 1)
#ifndef Py_MEMBER_SIZE
#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
#endif
static size_t __pyx_pyframe_localsplus_offset = 0;
#include "frameobject.h"
#define __Pxy_PyFrame_Initialize_Offsets()\
((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
(void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
#define __Pyx_PyFrame_GetLocalsplus(frame)\
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
#endif
/* PyCFunctionFastCall.proto */
#if CYTHON_FAST_PYCCALL
static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);
#else
#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL)
#endif
/* PyObjectCallMethO.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);
#endif
/* PyObjectCallOneArg.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);
/* GetItemInt.proto */
#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
(__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
__Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\
(is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\
__Pyx_GetItemInt_Generic(o, to_py_func(i))))
#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
(__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
__Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL))
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
int wraparound, int boundscheck);
#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\
(__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\
__Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\
(PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL))
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
int wraparound, int boundscheck);
static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,
int is_list, int wraparound, int boundscheck);
/* ObjectGetItem.proto */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key);
#else
#define __Pyx_PyObject_GetItem(obj, key) PyObject_GetItem(obj, key)
#endif
/* PyIntBinop.proto */
#if !CYTHON_COMPILING_IN_PYPY
static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check);
#else
#define __Pyx_PyInt_AddObjC(op1, op2, intval, inplace, zerodivision_check)\
(inplace ? PyNumber_InPlaceAdd(op1, op2) : PyNumber_Add(op1, op2))
#endif
/* PyIntBinop.proto */
#if !CYTHON_COMPILING_IN_PYPY
static PyObject* __Pyx_PyInt_SubtractObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check);
#else
#define __Pyx_PyInt_SubtractObjC(op1, op2, intval, inplace, zerodivision_check)\
(inplace ? PyNumber_InPlaceSubtract(op1, op2) : PyNumber_Subtract(op1, op2))
#endif
/* ArgTypeTest.proto */
#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\
((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\
__Pyx__ArgTypeTest(obj, type, name, exact))
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact);
/* DictGetItem.proto */
#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);
#define __Pyx_PyObject_Dict_GetItem(obj, name)\
(likely(PyDict_CheckExact(obj)) ?\
__Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))
#else
#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)
#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name)
#endif
/* RaiseTooManyValuesToUnpack.proto */
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
/* RaiseNeedMoreValuesToUnpack.proto */
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
/* RaiseNoneIterError.proto */
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);
/* ExtTypeTest.proto */
static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type);
/* GetTopmostException.proto */
#if CYTHON_USE_EXC_INFO_STACK
static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
#endif
/* SaveResetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
#else
#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb)
#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb)
#endif
/* PyErrExceptionMatches.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
#else
#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err)
#endif
/* GetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb)
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* PyObjectCall2Args.proto */
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);
/* IncludeStringH.proto */
#include <string.h>
/* BytesEquals.proto */
static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);
/* UnicodeEquals.proto */
static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);
/* StrEquals.proto */
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals
#else
#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals
#endif
/* None.proto */
static CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t, Py_ssize_t);
/* UnaryNegOverflows.proto */
#define UNARY_NEG_WOULD_OVERFLOW(x)\
(((x) < 0) & ((unsigned long)(x) == 0-(unsigned long)(x)))
static CYTHON_UNUSED int __pyx_array_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
static PyObject *__pyx_array_get_memview(struct __pyx_array_obj *); /*proto*/
/* GetAttr.proto */
static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *);
/* decode_c_string_utf16.proto */
/* Thin wrappers around PyUnicode_DecodeUTF16 that fix the byte order:
 * 0 = auto-detect from BOM (native order if none), -1 = little-endian,
 * +1 = big-endian. The byteorder argument must be an lvalue because the
 * C API writes the detected order back through the pointer. */
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) {
    int detected_order = 0;  /* auto-detect via BOM */
    return PyUnicode_DecodeUTF16(s, size, errors, &detected_order);
}
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) {
    int little_endian = -1;
    return PyUnicode_DecodeUTF16(s, size, errors, &little_endian);
}
static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) {
    int big_endian = 1;
    return PyUnicode_DecodeUTF16(s, size, errors, &big_endian);
}
/* decode_c_string.proto */
static CYTHON_INLINE PyObject* __Pyx_decode_c_string(
const char* cstring, Py_ssize_t start, Py_ssize_t stop,
const char* encoding, const char* errors,
PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors));
/* GetAttr3.proto */
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
/* SwapException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* Import.proto */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
/* FastTypeChecks.proto */
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
#else
#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
#endif
#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
static CYTHON_UNUSED int __pyx_memoryview_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
/* ListExtend.proto */
/* Extend list L with the items of iterable v; returns 0 on success,
 * -1 with an exception set on failure. On CPython this goes through the
 * internal _PyList_Extend (which returns None on success); elsewhere it
 * uses the portable "assign to an empty tail slice" idiom. */
static CYTHON_INLINE int __Pyx_PyList_Extend(PyObject* L, PyObject* v) {
#if CYTHON_COMPILING_IN_CPYTHON
    PyObject* result = _PyList_Extend((PyListObject*)L, v);
    if (likely(result)) {
        Py_DECREF(result);  /* discard the returned None */
        return 0;
    }
    return -1;
#else
    return PyList_SetSlice(L, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, v);
#endif
}
/* ListAppend.proto */
#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS
/* Fast-path list append.
 * Takes the inline path only when there is spare capacity AND the list is
 * more than half full (len > allocated/2); otherwise defers to
 * PyList_Append so CPython's resize policy can shrink over-allocated
 * lists. The single '&' (not '&&') is intentional: both operands are 0/1,
 * and the bitwise form avoids a second branch.
 * Returns 0 on success, -1 on error (only possible via PyList_Append). */
static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {
    PyListObject* L = (PyListObject*) list;
    Py_ssize_t len = Py_SIZE(list);
    if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {
        Py_INCREF(x);
        PyList_SET_ITEM(list, len, x);
#if PY_VERSION_HEX >= 0x030900A4
        /* Py_SIZE() stopped being assignable (it became a function) in
         * CPython 3.11; Py_SET_SIZE exists since 3.9.0a4. */
        Py_SET_SIZE(list, len + 1);
#else
        Py_SIZE(list) = len+1;
#endif
        return 0;
    }
    return PyList_Append(list, x);
}
#else
#define __Pyx_PyList_Append(L,x) PyList_Append(L,x)
#endif
/* ImportFrom.proto */
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
/* HasAttr.proto */
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *);
/* PyObject_GenericGetAttrNoDict.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
#endif
/* PyObject_GenericGetAttr.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
#endif
/* SetVTable.proto */
static int __Pyx_SetVtable(PyObject *dict, void *vtable);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
/* TypeImport.proto */
#ifndef __PYX_HAVE_RT_ImportType_proto
#define __PYX_HAVE_RT_ImportType_proto
enum __Pyx_ImportType_CheckSize {
__Pyx_ImportType_CheckSize_Error = 0,
__Pyx_ImportType_CheckSize_Warn = 1,
__Pyx_ImportType_CheckSize_Ignore = 2
};
static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
#endif
/* CLineInTraceback.proto */
#ifdef CYTHON_CLINE_IN_TRACEBACK
#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
#else
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
#endif
/* CodeObjectCache.proto */
typedef struct {
PyCodeObject* code_object;
int code_line;
} __Pyx_CodeObjectCacheEntry;
struct __Pyx_CodeObjectCache {
int count;
int max_count;
__Pyx_CodeObjectCacheEntry* entries;
};
static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
static PyCodeObject *__pyx_find_code_object(int code_line);
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
/* AddTraceback.proto */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
#if PY_MAJOR_VERSION < 3
static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags);
static void __Pyx_ReleaseBuffer(Py_buffer *view);
#else
#define __Pyx_GetBuffer PyObject_GetBuffer
#define __Pyx_ReleaseBuffer PyBuffer_Release
#endif
/* BufferStructDeclare.proto */
typedef struct {
Py_ssize_t shape, strides, suboffsets;
} __Pyx_Buf_DimInfo;
typedef struct {
size_t refcount;
Py_buffer pybuffer;
} __Pyx_Buffer;
typedef struct {
__Pyx_Buffer *rcbuffer;
char *data;
__Pyx_Buf_DimInfo diminfo[8];
} __Pyx_LocalBuf_ND;
/* MemviewSliceIsContig.proto */
static int __pyx_memviewslice_is_contig(const __Pyx_memviewslice mvs, char order, int ndim);
/* OverlappingSlices.proto */
static int __pyx_slices_overlap(__Pyx_memviewslice *slice1,
__Pyx_memviewslice *slice2,
int ndim, size_t itemsize);
/* Capsule.proto */
static CYTHON_INLINE PyObject *__pyx_capsule_create(void *p, const char *sig);
/* IsLittleEndian.proto */
static CYTHON_INLINE int __Pyx_Is_Little_Endian(void);
/* BufferFormatCheck.proto */
static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts);
static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,
__Pyx_BufFmt_StackElem* stack,
__Pyx_TypeInfo* type);
/* TypeInfoCompare.proto */
static int __pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b);
/* MemviewSliceValidateAndInit.proto */
static int __Pyx_ValidateAndInit_memviewslice(
int *axes_specs,
int c_or_f_flag,
int buf_flags,
int ndim,
__Pyx_TypeInfo *dtype,
__Pyx_BufFmt_StackElem stack[],
__Pyx_memviewslice *memviewslice,
PyObject *original_obj);
/* ObjectToMemviewSlice.proto */
static CYTHON_INLINE __Pyx_memviewslice __Pyx_PyObject_to_MemoryviewSlice_ds_float(PyObject *, int writable_flag);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
/* MemviewDtypeToObject.proto */
static CYTHON_INLINE PyObject *__pyx_memview_get_float(const char *itemp);
static CYTHON_INLINE int __pyx_memview_set_float(const char *itemp, PyObject *obj);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* RealImag.proto */
#if CYTHON_CCOMPLEX
#ifdef __cplusplus
#define __Pyx_CREAL(z) ((z).real())
#define __Pyx_CIMAG(z) ((z).imag())
#else
#define __Pyx_CREAL(z) (__real__(z))
#define __Pyx_CIMAG(z) (__imag__(z))
#endif
#else
#define __Pyx_CREAL(z) ((z).real)
#define __Pyx_CIMAG(z) ((z).imag)
#endif
#if defined(__cplusplus) && CYTHON_CCOMPLEX\
&& (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103)
#define __Pyx_SET_CREAL(z,x) ((z).real(x))
#define __Pyx_SET_CIMAG(z,y) ((z).imag(y))
#else
#define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x)
#define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y)
#endif
/* Arithmetic.proto */
#if CYTHON_CCOMPLEX
#define __Pyx_c_eq_float(a, b) ((a)==(b))
#define __Pyx_c_sum_float(a, b) ((a)+(b))
#define __Pyx_c_diff_float(a, b) ((a)-(b))
#define __Pyx_c_prod_float(a, b) ((a)*(b))
#define __Pyx_c_quot_float(a, b) ((a)/(b))
#define __Pyx_c_neg_float(a) (-(a))
#ifdef __cplusplus
#define __Pyx_c_is_zero_float(z) ((z)==(float)0)
#define __Pyx_c_conj_float(z) (::std::conj(z))
#if 1
#define __Pyx_c_abs_float(z) (::std::abs(z))
#define __Pyx_c_pow_float(a, b) (::std::pow(a, b))
#endif
#else
#define __Pyx_c_is_zero_float(z) ((z)==0)
#define __Pyx_c_conj_float(z) (conjf(z))
#if 1
#define __Pyx_c_abs_float(z) (cabsf(z))
#define __Pyx_c_pow_float(a, b) (cpowf(a, b))
#endif
#endif
#else
static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex);
static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex);
#if 1
static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex);
static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex);
#endif
#endif
/* Arithmetic.proto */
#if CYTHON_CCOMPLEX
#define __Pyx_c_eq_double(a, b) ((a)==(b))
#define __Pyx_c_sum_double(a, b) ((a)+(b))
#define __Pyx_c_diff_double(a, b) ((a)-(b))
#define __Pyx_c_prod_double(a, b) ((a)*(b))
#define __Pyx_c_quot_double(a, b) ((a)/(b))
#define __Pyx_c_neg_double(a) (-(a))
#ifdef __cplusplus
#define __Pyx_c_is_zero_double(z) ((z)==(double)0)
#define __Pyx_c_conj_double(z) (::std::conj(z))
#if 1
#define __Pyx_c_abs_double(z) (::std::abs(z))
#define __Pyx_c_pow_double(a, b) (::std::pow(a, b))
#endif
#else
#define __Pyx_c_is_zero_double(z) ((z)==0)
#define __Pyx_c_conj_double(z) (conj(z))
#if 1
#define __Pyx_c_abs_double(z) (cabs(z))
#define __Pyx_c_pow_double(a, b) (cpow(a, b))
#endif
#endif
#else
static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex);
static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex);
#if 1
static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex);
static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex);
#endif
#endif
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value);
/* MemviewSliceCopyTemplate.proto */
static __Pyx_memviewslice
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
const char *mode, int ndim,
size_t sizeof_dtype, int contig_flag,
int dtype_is_object);
/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
/* CIntFromPy.proto */
static CYTHON_INLINE char __Pyx_PyInt_As_char(PyObject *);
/* CheckBinaryVersion.proto */
static int __Pyx_check_binary_version(void);
/* InitStrings.proto */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
static PyObject *__pyx_array_get_memview(struct __pyx_array_obj *__pyx_v_self); /* proto*/
static char *__pyx_memoryview_get_item_pointer(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index); /* proto*/
static PyObject *__pyx_memoryview_is_slice(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_obj); /* proto*/
static PyObject *__pyx_memoryview_setitem_slice_assignment(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_dst, PyObject *__pyx_v_src); /* proto*/
static PyObject *__pyx_memoryview_setitem_slice_assign_scalar(struct __pyx_memoryview_obj *__pyx_v_self, struct __pyx_memoryview_obj *__pyx_v_dst, PyObject *__pyx_v_value); /* proto*/
static PyObject *__pyx_memoryview_setitem_indexed(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /* proto*/
static PyObject *__pyx_memoryview_convert_item_to_object(struct __pyx_memoryview_obj *__pyx_v_self, char *__pyx_v_itemp); /* proto*/
static PyObject *__pyx_memoryview_assign_item_from_object(struct __pyx_memoryview_obj *__pyx_v_self, char *__pyx_v_itemp, PyObject *__pyx_v_value); /* proto*/
static PyObject *__pyx_memoryviewslice_convert_item_to_object(struct __pyx_memoryviewslice_obj *__pyx_v_self, char *__pyx_v_itemp); /* proto*/
static PyObject *__pyx_memoryviewslice_assign_item_from_object(struct __pyx_memoryviewslice_obj *__pyx_v_self, char *__pyx_v_itemp, PyObject *__pyx_v_value); /* proto*/
/* Module declarations from 'cpython.buffer' */
/* Module declarations from 'libc.string' */
/* Module declarations from 'libc.stdio' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.type' */
static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0;
/* Module declarations from 'cpython' */
/* Module declarations from 'cpython.object' */
/* Module declarations from 'cpython.ref' */
/* Module declarations from 'cpython.mem' */
/* Module declarations from 'numpy' */
/* Module declarations from 'numpy' */
static PyTypeObject *__pyx_ptype_5numpy_dtype = 0;
static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0;
static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0;
static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0;
static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0;
static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/
/* Module declarations from 'msanomalydetector._anomaly_kernel_cython' */
static PyTypeObject *__pyx_array_type = 0;
static PyTypeObject *__pyx_MemviewEnum_type = 0;
static PyTypeObject *__pyx_memoryview_type = 0;
static PyTypeObject *__pyx_memoryviewslice_type = 0;
static PyObject *generic = 0;
static PyObject *strided = 0;
static PyObject *indirect = 0;
static PyObject *contiguous = 0;
static PyObject *indirect_contiguous = 0;
static int __pyx_memoryview_thread_locks_used;
static PyThread_type_lock __pyx_memoryview_thread_locks[8];
static float __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__Pyx_memviewslice, int, int, int __pyx_skip_dispatch); /*proto*/
static PyObject *__pyx_f_17msanomalydetector_22_anomaly_kernel_cython_median_filter(PyArrayObject *, int, int __pyx_skip_dispatch, struct __pyx_opt_args_17msanomalydetector_22_anomaly_kernel_cython_median_filter *__pyx_optional_args); /*proto*/
static struct __pyx_array_obj *__pyx_array_new(PyObject *, Py_ssize_t, char *, char *, char *); /*proto*/
static void *__pyx_align_pointer(void *, size_t); /*proto*/
static PyObject *__pyx_memoryview_new(PyObject *, int, int, __Pyx_TypeInfo *); /*proto*/
static CYTHON_INLINE int __pyx_memoryview_check(PyObject *); /*proto*/
static PyObject *_unellipsify(PyObject *, int); /*proto*/
static PyObject *assert_direct_dimensions(Py_ssize_t *, int); /*proto*/
static struct __pyx_memoryview_obj *__pyx_memview_slice(struct __pyx_memoryview_obj *, PyObject *); /*proto*/
static int __pyx_memoryview_slice_memviewslice(__Pyx_memviewslice *, Py_ssize_t, Py_ssize_t, Py_ssize_t, int, int, int *, Py_ssize_t, Py_ssize_t, Py_ssize_t, int, int, int, int); /*proto*/
static char *__pyx_pybuffer_index(Py_buffer *, char *, Py_ssize_t, Py_ssize_t); /*proto*/
static int __pyx_memslice_transpose(__Pyx_memviewslice *); /*proto*/
static PyObject *__pyx_memoryview_fromslice(__Pyx_memviewslice, int, PyObject *(*)(char *), int (*)(char *, PyObject *), int); /*proto*/
static __Pyx_memviewslice *__pyx_memoryview_get_slice_from_memoryview(struct __pyx_memoryview_obj *, __Pyx_memviewslice *); /*proto*/
static void __pyx_memoryview_slice_copy(struct __pyx_memoryview_obj *, __Pyx_memviewslice *); /*proto*/
static PyObject *__pyx_memoryview_copy_object(struct __pyx_memoryview_obj *); /*proto*/
static PyObject *__pyx_memoryview_copy_object_from_slice(struct __pyx_memoryview_obj *, __Pyx_memviewslice *); /*proto*/
static Py_ssize_t abs_py_ssize_t(Py_ssize_t); /*proto*/
static char __pyx_get_best_slice_order(__Pyx_memviewslice *, int); /*proto*/
static void _copy_strided_to_strided(char *, Py_ssize_t *, char *, Py_ssize_t *, Py_ssize_t *, Py_ssize_t *, int, size_t); /*proto*/
static void copy_strided_to_strided(__Pyx_memviewslice *, __Pyx_memviewslice *, int, size_t); /*proto*/
static Py_ssize_t __pyx_memoryview_slice_get_size(__Pyx_memviewslice *, int); /*proto*/
static Py_ssize_t __pyx_fill_contig_strides_array(Py_ssize_t *, Py_ssize_t *, Py_ssize_t, int, char); /*proto*/
static void *__pyx_memoryview_copy_data_to_temp(__Pyx_memviewslice *, __Pyx_memviewslice *, char, int); /*proto*/
static int __pyx_memoryview_err_extents(int, Py_ssize_t, Py_ssize_t); /*proto*/
static int __pyx_memoryview_err_dim(PyObject *, char *, int); /*proto*/
static int __pyx_memoryview_err(PyObject *, char *); /*proto*/
static int __pyx_memoryview_copy_contents(__Pyx_memviewslice, __Pyx_memviewslice, int, int, int); /*proto*/
static void __pyx_memoryview_broadcast_leading(__Pyx_memviewslice *, int, int); /*proto*/
static void __pyx_memoryview_refcount_copying(__Pyx_memviewslice *, int, int, int); /*proto*/
static void __pyx_memoryview_refcount_objects_in_slice_with_gil(char *, Py_ssize_t *, Py_ssize_t *, int, int); /*proto*/
static void __pyx_memoryview_refcount_objects_in_slice(char *, Py_ssize_t *, Py_ssize_t *, int, int); /*proto*/
static void __pyx_memoryview_slice_assign_scalar(__Pyx_memviewslice *, int, size_t, void *, int); /*proto*/
static void __pyx_memoryview__slice_assign_scalar(char *, Py_ssize_t *, Py_ssize_t *, int, size_t, void *); /*proto*/
static PyObject *__pyx_unpickle_Enum__set_state(struct __pyx_MemviewEnum_obj *, PyObject *); /*proto*/
static __Pyx_TypeInfo __Pyx_TypeInfo_float = { "float", NULL, sizeof(float), { 0 }, 0, 'R', 0, 0 };
#define __Pyx_MODULE_NAME "msanomalydetector._anomaly_kernel_cython"
extern int __pyx_module_is_main_msanomalydetector___anomaly_kernel_cython;
int __pyx_module_is_main_msanomalydetector___anomaly_kernel_cython = 0;
/* Implementation of 'msanomalydetector._anomaly_kernel_cython' */
static PyObject *__pyx_builtin_range;
static PyObject *__pyx_builtin_ValueError;
static PyObject *__pyx_builtin_RuntimeError;
static PyObject *__pyx_builtin_ImportError;
static PyObject *__pyx_builtin_MemoryError;
static PyObject *__pyx_builtin_enumerate;
static PyObject *__pyx_builtin_TypeError;
static PyObject *__pyx_builtin_Ellipsis;
static PyObject *__pyx_builtin_id;
static PyObject *__pyx_builtin_IndexError;
static const char __pyx_k_O[] = "O";
static const char __pyx_k_c[] = "c";
static const char __pyx_k_f[] = "f";
static const char __pyx_k_i[] = "i";
static const char __pyx_k_j[] = "j";
static const char __pyx_k_id[] = "id";
static const char __pyx_k_np[] = "np";
static const char __pyx_k_new[] = "__new__";
static const char __pyx_k_obj[] = "obj";
static const char __pyx_k_base[] = "base";
static const char __pyx_k_data[] = "data";
static const char __pyx_k_dict[] = "__dict__";
static const char __pyx_k_main[] = "__main__";
static const char __pyx_k_mode[] = "mode";
static const char __pyx_k_name[] = "name";
static const char __pyx_k_ndim[] = "ndim";
static const char __pyx_k_pack[] = "pack";
static const char __pyx_k_size[] = "size";
static const char __pyx_k_step[] = "step";
static const char __pyx_k_stop[] = "stop";
static const char __pyx_k_test[] = "__test__";
static const char __pyx_k_ASCII[] = "ASCII";
static const char __pyx_k_array[] = "array";
static const char __pyx_k_class[] = "__class__";
static const char __pyx_k_error[] = "error";
static const char __pyx_k_flags[] = "flags";
static const char __pyx_k_numpy[] = "numpy";
static const char __pyx_k_range[] = "range";
static const char __pyx_k_shape[] = "shape";
static const char __pyx_k_start[] = "start";
static const char __pyx_k_bisect[] = "bisect";
static const char __pyx_k_encode[] = "encode";
static const char __pyx_k_format[] = "format";
static const char __pyx_k_import[] = "__import__";
static const char __pyx_k_name_2[] = "__name__";
static const char __pyx_k_pickle[] = "pickle";
static const char __pyx_k_reduce[] = "__reduce__";
static const char __pyx_k_struct[] = "struct";
static const char __pyx_k_unpack[] = "unpack";
static const char __pyx_k_update[] = "update";
static const char __pyx_k_window[] = "window";
static const char __pyx_k_fortran[] = "fortran";
static const char __pyx_k_memview[] = "memview";
static const char __pyx_k_Ellipsis[] = "Ellipsis";
static const char __pyx_k_getstate[] = "__getstate__";
static const char __pyx_k_itemsize[] = "itemsize";
static const char __pyx_k_pyx_type[] = "__pyx_type";
static const char __pyx_k_setstate[] = "__setstate__";
static const char __pyx_k_TypeError[] = "TypeError";
static const char __pyx_k_enumerate[] = "enumerate";
static const char __pyx_k_pyx_state[] = "__pyx_state";
static const char __pyx_k_reduce_ex[] = "__reduce_ex__";
static const char __pyx_k_IndexError[] = "IndexError";
static const char __pyx_k_ValueError[] = "ValueError";
static const char __pyx_k_pyx_result[] = "__pyx_result";
static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__";
static const char __pyx_k_ImportError[] = "ImportError";
static const char __pyx_k_MemoryError[] = "MemoryError";
static const char __pyx_k_PickleError[] = "PickleError";
/*
 * Cython-generated table of raw C string constants.  Each __pyx_k_* array
 * holds the bytes of a Python-level name or message used by this module:
 * exception-type names, keyword-argument names, error/format messages, and
 * the memoryview mode-description strings.  The PyObject* counterparts
 * declared below are presumably interned from these bytes during module
 * initialization (the init code is outside this chunk -- confirm there).
 */
static const char __pyx_k_RuntimeError[] = "RuntimeError";
static const char __pyx_k_bisect_right[] = "bisect_right";
static const char __pyx_k_need_two_end[] = "need_two_end";
static const char __pyx_k_pyx_checksum[] = "__pyx_checksum";
static const char __pyx_k_stringsource[] = "stringsource";
static const char __pyx_k_pyx_getbuffer[] = "__pyx_getbuffer";
static const char __pyx_k_reduce_cython[] = "__reduce_cython__";
static const char __pyx_k_View_MemoryView[] = "View.MemoryView";
static const char __pyx_k_allocate_buffer[] = "allocate_buffer";
static const char __pyx_k_dtype_is_object[] = "dtype_is_object";
static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError";
static const char __pyx_k_setstate_cython[] = "__setstate_cython__";
static const char __pyx_k_pyx_unpickle_Enum[] = "__pyx_unpickle_Enum";
static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
static const char __pyx_k_strided_and_direct[] = "<strided and direct>";
static const char __pyx_k_strided_and_indirect[] = "<strided and indirect>";
static const char __pyx_k_contiguous_and_direct[] = "<contiguous and direct>";
static const char __pyx_k_MemoryView_of_r_object[] = "<MemoryView of %r object>";
static const char __pyx_k_MemoryView_of_r_at_0x_x[] = "<MemoryView of %r at 0x%x>";
static const char __pyx_k_contiguous_and_indirect[] = "<contiguous and indirect>";
static const char __pyx_k_Cannot_index_with_type_s[] = "Cannot index with type '%s'";
static const char __pyx_k_no_median_for_empty_data[] = "no median for empty data";
static const char __pyx_k_Invalid_shape_in_axis_d_d[] = "Invalid shape in axis %d: %d.";
static const char __pyx_k_itemsize_0_for_cython_array[] = "itemsize <= 0 for cython.array";
static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous";
static const char __pyx_k_unable_to_allocate_array_data[] = "unable to allocate array data.";
static const char __pyx_k_strided_and_direct_or_indirect[] = "<strided and direct or indirect>";
static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import";
static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)";
static const char __pyx_k_Buffer_view_does_not_expose_stri[] = "Buffer view does not expose strides";
static const char __pyx_k_Can_only_create_a_buffer_that_is[] = "Can only create a buffer that is contiguous in memory.";
static const char __pyx_k_Cannot_assign_to_read_only_memor[] = "Cannot assign to read-only memoryview";
static const char __pyx_k_Cannot_create_writable_memory_vi[] = "Cannot create writable memory view from read-only memoryview";
static const char __pyx_k_Empty_shape_tuple_for_cython_arr[] = "Empty shape tuple for cython.array";
static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd";
static const char __pyx_k_Incompatible_checksums_s_vs_0xb0[] = "Incompatible checksums (%s vs 0xb068931 = (name))";
static const char __pyx_k_Indirect_dimensions_not_supporte[] = "Indirect dimensions not supported";
static const char __pyx_k_Invalid_mode_expected_c_or_fortr[] = "Invalid mode, expected 'c' or 'fortran', got %s";
static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported";
static const char __pyx_k_Out_of_bounds_on_buffer_access_a[] = "Out of bounds on buffer access (axis %d)";
static const char __pyx_k_Unable_to_convert_item_to_object[] = "Unable to convert item to object";
static const char __pyx_k_got_differing_extents_in_dimensi[] = "got differing extents in dimension %d (got %d and %d)";
static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous";
static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__";
static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import";
static const char __pyx_k_unable_to_allocate_shape_and_str[] = "unable to allocate shape and strides.";
static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short.";
/*
 * Forward declarations of cached Python objects built from the __pyx_k_*
 * byte tables above.  Standard Cython naming convention:
 *   __pyx_n_s_*  - interned str identifier (attribute/keyword/module name)
 *   __pyx_n_u_*  - interned unicode identifier
 *   __pyx_n_b_*  - interned bytes identifier
 *   __pyx_kp_s_* - str constant (not necessarily a valid identifier)
 *   __pyx_kp_u_* - unicode constant
 * NOTE(review): presumably created once at module init and reused for the
 * lifetime of the module -- the init code is not visible in this chunk.
 */
static PyObject *__pyx_n_s_ASCII;
static PyObject *__pyx_kp_s_Buffer_view_does_not_expose_stri;
static PyObject *__pyx_kp_s_Can_only_create_a_buffer_that_is;
static PyObject *__pyx_kp_s_Cannot_assign_to_read_only_memor;
static PyObject *__pyx_kp_s_Cannot_create_writable_memory_vi;
static PyObject *__pyx_kp_s_Cannot_index_with_type_s;
static PyObject *__pyx_n_s_Ellipsis;
static PyObject *__pyx_kp_s_Empty_shape_tuple_for_cython_arr;
static PyObject *__pyx_kp_u_Format_string_allocated_too_shor;
static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2;
static PyObject *__pyx_n_s_ImportError;
static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0xb0;
static PyObject *__pyx_n_s_IndexError;
static PyObject *__pyx_kp_s_Indirect_dimensions_not_supporte;
static PyObject *__pyx_kp_s_Invalid_mode_expected_c_or_fortr;
static PyObject *__pyx_kp_s_Invalid_shape_in_axis_d_d;
static PyObject *__pyx_n_s_MemoryError;
static PyObject *__pyx_kp_s_MemoryView_of_r_at_0x_x;
static PyObject *__pyx_kp_s_MemoryView_of_r_object;
static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor;
static PyObject *__pyx_n_b_O;
static PyObject *__pyx_kp_s_Out_of_bounds_on_buffer_access_a;
static PyObject *__pyx_n_s_PickleError;
static PyObject *__pyx_n_s_RuntimeError;
static PyObject *__pyx_n_s_TypeError;
static PyObject *__pyx_kp_s_Unable_to_convert_item_to_object;
static PyObject *__pyx_n_s_ValueError;
static PyObject *__pyx_n_s_View_MemoryView;
static PyObject *__pyx_n_s_allocate_buffer;
static PyObject *__pyx_n_s_array;
static PyObject *__pyx_n_s_base;
static PyObject *__pyx_n_s_bisect;
static PyObject *__pyx_n_s_bisect_right;
static PyObject *__pyx_n_s_c;
static PyObject *__pyx_n_u_c;
static PyObject *__pyx_n_s_class;
static PyObject *__pyx_n_s_cline_in_traceback;
static PyObject *__pyx_kp_s_contiguous_and_direct;
static PyObject *__pyx_kp_s_contiguous_and_indirect;
static PyObject *__pyx_n_s_data;
static PyObject *__pyx_n_s_dict;
static PyObject *__pyx_n_s_dtype_is_object;
static PyObject *__pyx_n_s_encode;
static PyObject *__pyx_n_s_enumerate;
static PyObject *__pyx_n_s_error;
static PyObject *__pyx_n_s_f;
static PyObject *__pyx_n_s_flags;
static PyObject *__pyx_n_s_format;
static PyObject *__pyx_n_s_fortran;
static PyObject *__pyx_n_u_fortran;
static PyObject *__pyx_n_s_getstate;
static PyObject *__pyx_kp_s_got_differing_extents_in_dimensi;
static PyObject *__pyx_n_s_i;
static PyObject *__pyx_n_s_id;
static PyObject *__pyx_n_s_import;
static PyObject *__pyx_n_s_itemsize;
static PyObject *__pyx_kp_s_itemsize_0_for_cython_array;
static PyObject *__pyx_n_s_j;
static PyObject *__pyx_n_s_main;
static PyObject *__pyx_n_s_memview;
static PyObject *__pyx_n_s_mode;
static PyObject *__pyx_n_s_name;
static PyObject *__pyx_n_s_name_2;
static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous;
static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou;
static PyObject *__pyx_n_s_ndim;
static PyObject *__pyx_n_s_need_two_end;
static PyObject *__pyx_n_s_new;
static PyObject *__pyx_kp_s_no_default___reduce___due_to_non;
static PyObject *__pyx_kp_s_no_median_for_empty_data;
static PyObject *__pyx_n_s_np;
static PyObject *__pyx_n_s_numpy;
static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to;
static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor;
static PyObject *__pyx_n_s_obj;
static PyObject *__pyx_n_s_pack;
static PyObject *__pyx_n_s_pickle;
static PyObject *__pyx_n_s_pyx_PickleError;
static PyObject *__pyx_n_s_pyx_checksum;
static PyObject *__pyx_n_s_pyx_getbuffer;
static PyObject *__pyx_n_s_pyx_result;
static PyObject *__pyx_n_s_pyx_state;
static PyObject *__pyx_n_s_pyx_type;
static PyObject *__pyx_n_s_pyx_unpickle_Enum;
static PyObject *__pyx_n_s_pyx_vtable;
static PyObject *__pyx_n_s_range;
static PyObject *__pyx_n_s_reduce;
static PyObject *__pyx_n_s_reduce_cython;
static PyObject *__pyx_n_s_reduce_ex;
static PyObject *__pyx_n_s_setstate;
static PyObject *__pyx_n_s_setstate_cython;
static PyObject *__pyx_n_s_shape;
static PyObject *__pyx_n_s_size;
static PyObject *__pyx_n_s_start;
static PyObject *__pyx_n_s_step;
static PyObject *__pyx_n_s_stop;
static PyObject *__pyx_kp_s_strided_and_direct;
static PyObject *__pyx_kp_s_strided_and_direct_or_indirect;
static PyObject *__pyx_kp_s_strided_and_indirect;
static PyObject *__pyx_kp_s_stringsource;
static PyObject *__pyx_n_s_struct;
static PyObject *__pyx_n_s_test;
static PyObject *__pyx_kp_s_unable_to_allocate_array_data;
static PyObject *__pyx_kp_s_unable_to_allocate_shape_and_str;
static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd;
static PyObject *__pyx_n_s_unpack;
static PyObject *__pyx_n_s_update;
static PyObject *__pyx_n_s_window;
/*
 * Prototypes for the generated implementation functions:
 *  - the two module-level functions from the .pyx source, sorted_median and
 *    median_filter (their "pf" bodies follow later in the file);
 *  - numpy ndarray __getbuffer__/__releasebuffer__ hooks;
 *  - the View.MemoryView utility types emitted into every memoryview-using
 *    module: array, the Enum helper, memoryview, _memoryviewslice, and the
 *    __pyx_unpickle_Enum reconstructor;
 *  - tp_new allocators for those extension types.
 */
static PyObject *__pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(CYTHON_UNUSED PyObject *__pyx_self, __Pyx_memviewslice __pyx_v_data, int __pyx_v_i, int __pyx_v_j); /* proto */
static PyObject *__pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_2median_filter(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_data, int __pyx_v_window, int __pyx_v_need_two_end); /* proto */
static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */
static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array___cinit__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_shape, Py_ssize_t __pyx_v_itemsize, PyObject *__pyx_v_format, PyObject *__pyx_v_mode, int __pyx_v_allocate_buffer); /* proto */
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array_2__getbuffer__(struct __pyx_array_obj *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */
static void __pyx_array___pyx_pf_15View_dot_MemoryView_5array_4__dealloc__(struct __pyx_array_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_5array_7memview___get__(struct __pyx_array_obj *__pyx_v_self); /* proto */
static Py_ssize_t __pyx_array___pyx_pf_15View_dot_MemoryView_5array_6__len__(struct __pyx_array_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_array___pyx_pf_15View_dot_MemoryView_5array_8__getattr__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_attr); /* proto */
static PyObject *__pyx_array___pyx_pf_15View_dot_MemoryView_5array_10__getitem__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_item); /* proto */
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array_12__setitem__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_value); /* proto */
static PyObject *__pyx_pf___pyx_array___reduce_cython__(CYTHON_UNUSED struct __pyx_array_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_array_2__setstate_cython__(CYTHON_UNUSED struct __pyx_array_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static int __pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum___init__(struct __pyx_MemviewEnum_obj *__pyx_v_self, PyObject *__pyx_v_name); /* proto */
static PyObject *__pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum_2__repr__(struct __pyx_MemviewEnum_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_MemviewEnum___reduce_cython__(struct __pyx_MemviewEnum_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_MemviewEnum_2__setstate_cython__(struct __pyx_MemviewEnum_obj *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview___cinit__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_obj, int __pyx_v_flags, int __pyx_v_dtype_is_object); /* proto */
static void __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_2__dealloc__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_4__getitem__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index); /* proto */
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_6__setitem__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /* proto */
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_8__getbuffer__(struct __pyx_memoryview_obj *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_1T___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4base___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_5shape___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_7strides___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_10suboffsets___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4ndim___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_8itemsize___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_6nbytes___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4size___get__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static Py_ssize_t __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_10__len__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_12__repr__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_14__str__(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_16is_c_contig(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_18is_f_contig(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_20copy(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_22copy_fortran(struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_memoryview___reduce_cython__(CYTHON_UNUSED struct __pyx_memoryview_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_memoryview_2__setstate_cython__(CYTHON_UNUSED struct __pyx_memoryview_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static void __pyx_memoryviewslice___pyx_pf_15View_dot_MemoryView_16_memoryviewslice___dealloc__(struct __pyx_memoryviewslice_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView_16_memoryviewslice_4base___get__(struct __pyx_memoryviewslice_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_memoryviewslice___reduce_cython__(CYTHON_UNUSED struct __pyx_memoryviewslice_obj *__pyx_v_self); /* proto */
static PyObject *__pyx_pf___pyx_memoryviewslice_2__setstate_cython__(CYTHON_UNUSED struct __pyx_memoryviewslice_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static PyObject *__pyx_pf_15View_dot_MemoryView___pyx_unpickle_Enum(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */
static PyObject *__pyx_tp_new_array(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tp_new_Enum(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tp_new_memoryview(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tp_new__memoryviewslice(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
/*
 * Cached constant objects.  The small ints 0, 1 and -1 plus 184977713,
 * which equals 0xb068931 -- the __pyx_unpickle_Enum checksum referenced by
 * the "Incompatible checksums" message in the string table above.  The
 * __pyx_tuple_* / __pyx_slice__23 / __pyx_codeobj__33 slots hold
 * pre-built argument tuples, a slice, and a code object; NOTE(review):
 * their contents are defined in init code outside this chunk.
 */
static PyObject *__pyx_int_0;
static PyObject *__pyx_int_1;
static PyObject *__pyx_int_184977713;
static PyObject *__pyx_int_neg_1;
static PyObject *__pyx_tuple_;
static PyObject *__pyx_tuple__2;
static PyObject *__pyx_tuple__3;
static PyObject *__pyx_tuple__4;
static PyObject *__pyx_tuple__5;
static PyObject *__pyx_tuple__6;
static PyObject *__pyx_tuple__7;
static PyObject *__pyx_tuple__8;
static PyObject *__pyx_tuple__9;
static PyObject *__pyx_slice__23;
static PyObject *__pyx_tuple__10;
static PyObject *__pyx_tuple__11;
static PyObject *__pyx_tuple__12;
static PyObject *__pyx_tuple__13;
static PyObject *__pyx_tuple__14;
static PyObject *__pyx_tuple__15;
static PyObject *__pyx_tuple__16;
static PyObject *__pyx_tuple__17;
static PyObject *__pyx_tuple__18;
static PyObject *__pyx_tuple__19;
static PyObject *__pyx_tuple__20;
static PyObject *__pyx_tuple__21;
static PyObject *__pyx_tuple__22;
static PyObject *__pyx_tuple__24;
static PyObject *__pyx_tuple__25;
static PyObject *__pyx_tuple__26;
static PyObject *__pyx_tuple__27;
static PyObject *__pyx_tuple__28;
static PyObject *__pyx_tuple__29;
static PyObject *__pyx_tuple__30;
static PyObject *__pyx_tuple__31;
static PyObject *__pyx_tuple__32;
static PyObject *__pyx_codeobj__33;
/* Late includes */
/* "msanomalydetector/_anomaly_kernel_cython.pyx":7
 *
 *
 * cpdef float sorted_median(float[:] data, int i, int j):             # <<<<<<<<<<<<<<
 *     cdef int n = j - i
 *     cdef int mid
 */
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_1sorted_median(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
/*
 * C fast path for the cpdef function sorted_median(data, i, j): returns the
 * median of the half-open window data[i:j] of a 1-D float memoryview.
 * Odd window -> middle element; even window -> mean of the two middle
 * elements; empty window -> raises Exception("no median for empty data").
 * NOTE(review): the name implies data[i:j] is already sorted; nothing here
 * verifies that -- confirm at the call sites.  __pyx_skip_dispatch is the
 * standard cpdef override-check flag, unused in this translation.
 */
static float __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__Pyx_memviewslice __pyx_v_data, int __pyx_v_i, int __pyx_v_j, CYTHON_UNUSED int __pyx_skip_dispatch) {
  int __pyx_v_n;
  int __pyx_v_mid;
  float __pyx_r;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  Py_ssize_t __pyx_t_3;
  int __pyx_t_4;
  Py_ssize_t __pyx_t_5;
  Py_ssize_t __pyx_t_6;
  __Pyx_RefNannySetupContext("sorted_median", 0);
  /* "msanomalydetector/_anomaly_kernel_cython.pyx":8
 *
 * cpdef float sorted_median(float[:] data, int i, int j):
 *     cdef int n = j - i             # <<<<<<<<<<<<<<
 *     cdef int mid
 *     if n == 0:
 */
  __pyx_v_n = (__pyx_v_j - __pyx_v_i);
  /* "msanomalydetector/_anomaly_kernel_cython.pyx":10
 *     cdef int n = j - i
 *     cdef int mid
 *     if n == 0:             # <<<<<<<<<<<<<<
 *         raise Exception("no median for empty data")
 *     if n % 2 == 1:
 */
  __pyx_t_1 = ((__pyx_v_n == 0) != 0);
  if (unlikely(__pyx_t_1)) {
    /* "msanomalydetector/_anomaly_kernel_cython.pyx":11
 *     cdef int mid
 *     if n == 0:
 *         raise Exception("no median for empty data")             # <<<<<<<<<<<<<<
 *     if n % 2 == 1:
 *         return data[i + n // 2]
 */
    /* Empty window: raise Exception using the pre-built args tuple __pyx_tuple_. */
    __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_Raise(__pyx_t_2, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __PYX_ERR(0, 11, __pyx_L1_error)
    /* "msanomalydetector/_anomaly_kernel_cython.pyx":10
 *     cdef int n = j - i
 *     cdef int mid
 *     if n == 0:             # <<<<<<<<<<<<<<
 *         raise Exception("no median for empty data")
 *     if n % 2 == 1:
 */
  }
  /* "msanomalydetector/_anomaly_kernel_cython.pyx":12
 *     if n == 0:
 *         raise Exception("no median for empty data")
 *     if n % 2 == 1:             # <<<<<<<<<<<<<<
 *         return data[i + n // 2]
 *     else:
 */
  __pyx_t_1 = ((__Pyx_mod_long(__pyx_v_n, 2) == 1) != 0);
  if (__pyx_t_1) {
    /* "msanomalydetector/_anomaly_kernel_cython.pyx":13
 *         raise Exception("no median for empty data")
 *     if n % 2 == 1:
 *         return data[i + n // 2]             # <<<<<<<<<<<<<<
 *     else:
 *         mid = i + n // 2
 */
    /* Odd-length window: index the middle element.  The wrap-around below
     * implements Python negative indexing; out-of-range raises IndexError
     * via __Pyx_RaiseBufferIndexError. */
    __pyx_t_3 = (__pyx_v_i + __Pyx_div_long(__pyx_v_n, 2));
    __pyx_t_4 = -1;
    if (__pyx_t_3 < 0) {
      __pyx_t_3 += __pyx_v_data.shape[0];
      if (unlikely(__pyx_t_3 < 0)) __pyx_t_4 = 0;
    } else if (unlikely(__pyx_t_3 >= __pyx_v_data.shape[0])) __pyx_t_4 = 0;
    if (unlikely(__pyx_t_4 != -1)) {
      __Pyx_RaiseBufferIndexError(__pyx_t_4);
      __PYX_ERR(0, 13, __pyx_L1_error)
    }
    __pyx_r = (*((float *) ( /* dim=0 */ (__pyx_v_data.data + __pyx_t_3 * __pyx_v_data.strides[0]) )));
    goto __pyx_L0;
    /* "msanomalydetector/_anomaly_kernel_cython.pyx":12
 *     if n == 0:
 *         raise Exception("no median for empty data")
 *     if n % 2 == 1:             # <<<<<<<<<<<<<<
 *         return data[i + n // 2]
 *     else:
 */
  }
  /* "msanomalydetector/_anomaly_kernel_cython.pyx":15
 *         return data[i + n // 2]
 *     else:
 *         mid = i + n // 2             # <<<<<<<<<<<<<<
 *         return (data[mid - 1] + data[mid])/2
 *
 */
  /*else*/ {
    /* Even-length window: average the two middle elements data[mid-1], data[mid]. */
    __pyx_v_mid = (__pyx_v_i + __Pyx_div_long(__pyx_v_n, 2));
    /* "msanomalydetector/_anomaly_kernel_cython.pyx":16
 *     else:
 *         mid = i + n // 2
 *         return (data[mid - 1] + data[mid])/2             # <<<<<<<<<<<<<<
 *
 * cpdef median_filter(np.ndarray data, int window, bint need_two_end=False):
 */
    __pyx_t_5 = (__pyx_v_mid - 1);
    __pyx_t_4 = -1;
    if (__pyx_t_5 < 0) {
      __pyx_t_5 += __pyx_v_data.shape[0];
      if (unlikely(__pyx_t_5 < 0)) __pyx_t_4 = 0;
    } else if (unlikely(__pyx_t_5 >= __pyx_v_data.shape[0])) __pyx_t_4 = 0;
    if (unlikely(__pyx_t_4 != -1)) {
      __Pyx_RaiseBufferIndexError(__pyx_t_4);
      __PYX_ERR(0, 16, __pyx_L1_error)
    }
    __pyx_t_6 = __pyx_v_mid;
    __pyx_t_4 = -1;
    if (__pyx_t_6 < 0) {
      __pyx_t_6 += __pyx_v_data.shape[0];
      if (unlikely(__pyx_t_6 < 0)) __pyx_t_4 = 0;
    } else if (unlikely(__pyx_t_6 >= __pyx_v_data.shape[0])) __pyx_t_4 = 0;
    if (unlikely(__pyx_t_4 != -1)) {
      __Pyx_RaiseBufferIndexError(__pyx_t_4);
      __PYX_ERR(0, 16, __pyx_L1_error)
    }
    __pyx_r = (((*((float *) ( /* dim=0 */ (__pyx_v_data.data + __pyx_t_5 * __pyx_v_data.strides[0]) ))) + (*((float *) ( /* dim=0 */ (__pyx_v_data.data + __pyx_t_6 * __pyx_v_data.strides[0]) )))) / 2.0);
    goto __pyx_L0;
  }
  /* "msanomalydetector/_anomaly_kernel_cython.pyx":7
 *
 *
 * cpdef float sorted_median(float[:] data, int i, int j):             # <<<<<<<<<<<<<<
 *     cdef int n = j - i
 *     cdef int mid
 */
  /* function exit code */
  /* A plain "cpdef float" has no error-return sentinel (no "except" clause
   * in the .pyx), so exceptions raised here cannot be propagated to the C
   * caller: they are reported as unraisable and 0 is returned instead. */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_WriteUnraisable("msanomalydetector._anomaly_kernel_cython.sorted_median", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* Python wrapper */
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_1sorted_median(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
/*
 * Python-callable wrapper for sorted_median.  Parses exactly three required
 * arguments (data, i, j), accepted positionally or by keyword, converts
 * "data" to a writable 1-D float memoryview (PyBUF_WRITABLE: read-only
 * buffers are rejected) and i/j to C int, then forwards to the "pf" body
 * below.  Raises TypeError via __Pyx_RaiseArgtupleInvalid on arity errors.
 */
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_1sorted_median(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
  __Pyx_memviewslice __pyx_v_data = { 0, 0, { 0 }, { 0 }, { 0 } };
  int __pyx_v_i;
  int __pyx_v_j;
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("sorted_median (wrapper)", 0);
  {
    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_data,&__pyx_n_s_i,&__pyx_n_s_j,0};
    PyObject* values[3] = {0,0,0};
    if (unlikely(__pyx_kwds)) {
      Py_ssize_t kw_args;
      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
      /* First collect positionals, then fill the remaining slots from kwargs. */
      switch (pos_args) {
        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
        CYTHON_FALLTHROUGH;
        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
        CYTHON_FALLTHROUGH;
        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
        CYTHON_FALLTHROUGH;
        case  0: break;
        default: goto __pyx_L5_argtuple_error;
      }
      kw_args = PyDict_Size(__pyx_kwds);
      switch (pos_args) {
        case  0:
        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--;
        else goto __pyx_L5_argtuple_error;
        CYTHON_FALLTHROUGH;
        case  1:
        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_i)) != 0)) kw_args--;
        else {
          __Pyx_RaiseArgtupleInvalid("sorted_median", 1, 3, 3, 1); __PYX_ERR(0, 7, __pyx_L3_error)
        }
        CYTHON_FALLTHROUGH;
        case  2:
        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_j)) != 0)) kw_args--;
        else {
          __Pyx_RaiseArgtupleInvalid("sorted_median", 1, 3, 3, 2); __PYX_ERR(0, 7, __pyx_L3_error)
        }
      }
      /* Any keyword left over at this point is unexpected/duplicated. */
      if (unlikely(kw_args > 0)) {
        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "sorted_median") < 0)) __PYX_ERR(0, 7, __pyx_L3_error)
      }
    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
      goto __pyx_L5_argtuple_error;
    } else {
      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
    }
    __pyx_v_data = __Pyx_PyObject_to_MemoryviewSlice_ds_float(values[0], PyBUF_WRITABLE); if (unlikely(!__pyx_v_data.memview)) __PYX_ERR(0, 7, __pyx_L3_error)
    __pyx_v_i = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_i == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 7, __pyx_L3_error)
    __pyx_v_j = __Pyx_PyInt_As_int(values[2]); if (unlikely((__pyx_v_j == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 7, __pyx_L3_error)
  }
  goto __pyx_L4_argument_unpacking_done;
  __pyx_L5_argtuple_error:;
  __Pyx_RaiseArgtupleInvalid("sorted_median", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7, __pyx_L3_error)
  __pyx_L3_error:;
  __Pyx_AddTraceback("msanomalydetector._anomaly_kernel_cython.sorted_median", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __Pyx_RefNannyFinishContext();
  return NULL;
  __pyx_L4_argument_unpacking_done:;
  __pyx_r = __pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_self, __pyx_v_data, __pyx_v_i, __pyx_v_j);
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/*
 * Implementation half of the Python wrapper: calls the C fast path
 * (__pyx_skip_dispatch = 0) and boxes the float result via
 * PyFloat_FromDouble.  Always releases the memoryview slice on exit
 * (__PYX_XDEC_MEMVIEW) and returns NULL with a traceback entry on error.
 */
static PyObject *__pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(CYTHON_UNUSED PyObject *__pyx_self, __Pyx_memviewslice __pyx_v_data, int __pyx_v_i, int __pyx_v_j) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  __Pyx_RefNannySetupContext("sorted_median", 0);
  __Pyx_XDECREF(__pyx_r);
  /* Guard against a slice whose acquisition failed upstream. */
  if (unlikely(!__pyx_v_data.memview)) { __Pyx_RaiseUnboundLocalError("data"); __PYX_ERR(0, 7, __pyx_L1_error) }
  __pyx_t_1 = PyFloat_FromDouble(__pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_v_data, __pyx_v_i, __pyx_v_j, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;
  __pyx_t_1 = 0;
  goto __pyx_L0;
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("msanomalydetector._anomaly_kernel_cython.sorted_median", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __PYX_XDEC_MEMVIEW(&__pyx_v_data, 1);
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":18
* return (data[mid - 1] + data[mid])/2
*
* cpdef median_filter(np.ndarray data, int window, bint need_two_end=False): # <<<<<<<<<<<<<<
* cdef int w_len = window // 2 * 2 + 1
* cdef int t_len = len(data)
*/
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_3median_filter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static PyObject *__pyx_f_17msanomalydetector_22_anomaly_kernel_cython_median_filter(PyArrayObject *__pyx_v_data, int __pyx_v_window, CYTHON_UNUSED int __pyx_skip_dispatch, struct __pyx_opt_args_17msanomalydetector_22_anomaly_kernel_cython_median_filter *__pyx_optional_args) {
int __pyx_v_need_two_end = ((int)0);
int __pyx_v_w_len;
int __pyx_v_t_len;
__Pyx_memviewslice __pyx_v_val = { 0, 0, { 0 }, { 0 }, { 0 } };
__Pyx_memviewslice __pyx_v_ans = { 0, 0, { 0 }, { 0 }, { 0 } };
__Pyx_memviewslice __pyx_v_cur_windows = { 0, 0, { 0 }, { 0 }, { 0 } };
int __pyx_v_delete_id;
int __pyx_v_add_id;
int __pyx_v_index;
PyObject *__pyx_v_i = NULL;
PyObject *__pyx_v_x = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
Py_ssize_t __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *(*__pyx_t_6)(PyObject *);
PyObject *__pyx_t_7 = NULL;
int __pyx_t_8;
__Pyx_memviewslice __pyx_t_9 = { 0, 0, { 0 }, { 0 }, { 0 } };
int __pyx_t_10;
Py_ssize_t __pyx_t_11;
Py_ssize_t __pyx_t_12;
PyObject *__pyx_t_13 = NULL;
PyObject *__pyx_t_14 = NULL;
Py_ssize_t __pyx_t_15;
Py_ssize_t __pyx_t_16;
float __pyx_t_17;
Py_ssize_t __pyx_t_18;
int __pyx_t_19;
Py_ssize_t __pyx_t_20;
int __pyx_t_21;
Py_ssize_t __pyx_t_22;
Py_ssize_t __pyx_t_23;
Py_ssize_t __pyx_t_24;
Py_ssize_t __pyx_t_25;
Py_ssize_t __pyx_t_26;
Py_ssize_t __pyx_t_27;
Py_ssize_t __pyx_t_28;
Py_ssize_t __pyx_t_29;
Py_ssize_t __pyx_t_30;
Py_ssize_t __pyx_t_31;
Py_ssize_t __pyx_t_32;
Py_ssize_t __pyx_t_33;
__Pyx_RefNannySetupContext("median_filter", 0);
if (__pyx_optional_args) {
if (__pyx_optional_args->__pyx_n > 0) {
__pyx_v_need_two_end = __pyx_optional_args->need_two_end;
}
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":19
*
* cpdef median_filter(np.ndarray data, int window, bint need_two_end=False):
* cdef int w_len = window // 2 * 2 + 1 # <<<<<<<<<<<<<<
* cdef int t_len = len(data)
* cdef float[:] val = array.array('f', [x for x in data])
*/
__pyx_v_w_len = ((__Pyx_div_long(__pyx_v_window, 2) * 2) + 1);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":20
* cpdef median_filter(np.ndarray data, int window, bint need_two_end=False):
* cdef int w_len = window // 2 * 2 + 1
* cdef int t_len = len(data) # <<<<<<<<<<<<<<
* cdef float[:] val = array.array('f', [x for x in data])
* cdef float[:] ans = array.array('f', [x for x in data])
*/
__pyx_t_1 = PyObject_Length(((PyObject *)__pyx_v_data)); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 20, __pyx_L1_error)
__pyx_v_t_len = __pyx_t_1;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":21
* cdef int w_len = window // 2 * 2 + 1
* cdef int t_len = len(data)
* cdef float[:] val = array.array('f', [x for x in data]) # <<<<<<<<<<<<<<
* cdef float[:] ans = array.array('f', [x for x in data])
* cdef float[:] cur_windows = array.array('f', [0 for x in range(w_len)])
*/
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_array); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_array); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
if (likely(PyList_CheckExact(((PyObject *)__pyx_v_data))) || PyTuple_CheckExact(((PyObject *)__pyx_v_data))) {
__pyx_t_5 = ((PyObject *)__pyx_v_data); __Pyx_INCREF(__pyx_t_5); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_5 = PyObject_GetIter(((PyObject *)__pyx_v_data)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 21, __pyx_L1_error)
}
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_5))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_5)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_7 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_1); __Pyx_INCREF(__pyx_t_7); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 21, __pyx_L1_error)
#else
__pyx_t_7 = PySequence_ITEM(__pyx_t_5, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_5)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_7 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_1); __Pyx_INCREF(__pyx_t_7); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 21, __pyx_L1_error)
#else
__pyx_t_7 = PySequence_ITEM(__pyx_t_5, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
#endif
}
} else {
__pyx_t_7 = __pyx_t_6(__pyx_t_5);
if (unlikely(!__pyx_t_7)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 21, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_7);
}
__Pyx_XDECREF_SET(__pyx_v_x, __pyx_t_7);
__pyx_t_7 = 0;
if (unlikely(__Pyx_ListComp_Append(__pyx_t_3, (PyObject*)__pyx_v_x))) __PYX_ERR(0, 21, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_5 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
__Pyx_INCREF(__pyx_t_5);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_4, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_4)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_n_s_f, __pyx_t_3};
__pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_n_s_f, __pyx_t_3};
__pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
} else
#endif
{
__pyx_t_7 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
if (__pyx_t_5) {
__Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL;
}
__Pyx_INCREF(__pyx_n_s_f);
__Pyx_GIVEREF(__pyx_n_s_f);
PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_8, __pyx_n_s_f);
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_8, __pyx_t_3);
__pyx_t_3 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
}
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_9 = __Pyx_PyObject_to_MemoryviewSlice_ds_float(__pyx_t_2, PyBUF_WRITABLE); if (unlikely(!__pyx_t_9.memview)) __PYX_ERR(0, 21, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_v_val = __pyx_t_9;
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":22
* cdef int t_len = len(data)
* cdef float[:] val = array.array('f', [x for x in data])
* cdef float[:] ans = array.array('f', [x for x in data]) # <<<<<<<<<<<<<<
* cdef float[:] cur_windows = array.array('f', [0 for x in range(w_len)])
* cdef int delete_id
*/
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_array); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_array); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
if (likely(PyList_CheckExact(((PyObject *)__pyx_v_data))) || PyTuple_CheckExact(((PyObject *)__pyx_v_data))) {
__pyx_t_3 = ((PyObject *)__pyx_v_data); __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(((PyObject *)__pyx_v_data)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 22, __pyx_L1_error)
}
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_3))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_3)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_5 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 22, __pyx_L1_error)
#else
__pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 22, __pyx_L1_error)
#else
__pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
#endif
}
} else {
__pyx_t_5 = __pyx_t_6(__pyx_t_3);
if (unlikely(!__pyx_t_5)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 22, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_5);
}
__Pyx_XDECREF_SET(__pyx_v_x, __pyx_t_5);
__pyx_t_5 = 0;
if (unlikely(__Pyx_ListComp_Append(__pyx_t_4, (PyObject*)__pyx_v_x))) __PYX_ERR(0, 22, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) {
__pyx_t_3 = PyMethod_GET_SELF(__pyx_t_7);
if (likely(__pyx_t_3)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
__Pyx_INCREF(__pyx_t_3);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_7, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_7)) {
PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_n_s_f, __pyx_t_4};
__pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) {
PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_n_s_f, __pyx_t_4};
__pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
{
__pyx_t_5 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
if (__pyx_t_3) {
__Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL;
}
__Pyx_INCREF(__pyx_n_s_f);
__Pyx_GIVEREF(__pyx_n_s_f);
PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_n_s_f);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, __pyx_t_4);
__pyx_t_4 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_5, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
}
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_9 = __Pyx_PyObject_to_MemoryviewSlice_ds_float(__pyx_t_2, PyBUF_WRITABLE); if (unlikely(!__pyx_t_9.memview)) __PYX_ERR(0, 22, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_v_ans = __pyx_t_9;
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":23
* cdef float[:] val = array.array('f', [x for x in data])
* cdef float[:] ans = array.array('f', [x for x in data])
* cdef float[:] cur_windows = array.array('f', [0 for x in range(w_len)]) # <<<<<<<<<<<<<<
* cdef int delete_id
* cdef int add_id
*/
__Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_array); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_array); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_7 = PyList_New(0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_w_len); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_range, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (likely(PyList_CheckExact(__pyx_t_3)) || PyTuple_CheckExact(__pyx_t_3)) {
__pyx_t_4 = __pyx_t_3; __Pyx_INCREF(__pyx_t_4); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_6 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 23, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_4))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_4)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_3 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 23, __pyx_L1_error)
#else
__pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_4)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 23, __pyx_L1_error)
#else
__pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
#endif
}
} else {
__pyx_t_3 = __pyx_t_6(__pyx_t_4);
if (unlikely(!__pyx_t_3)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 23, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_3);
}
__Pyx_XDECREF_SET(__pyx_v_x, __pyx_t_3);
__pyx_t_3 = 0;
if (unlikely(__Pyx_ListComp_Append(__pyx_t_7, (PyObject*)__pyx_int_0))) __PYX_ERR(0, 23, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_4 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {
__pyx_t_4 = PyMethod_GET_SELF(__pyx_t_5);
if (likely(__pyx_t_4)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
__Pyx_INCREF(__pyx_t_4);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_5, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_5)) {
PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_n_s_f, __pyx_t_7};
__pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_n_s_f, __pyx_t_7};
__pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
} else
#endif
{
__pyx_t_3 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
if (__pyx_t_4) {
__Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
}
__Pyx_INCREF(__pyx_n_s_f);
__Pyx_GIVEREF(__pyx_n_s_f);
PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_8, __pyx_n_s_f);
__Pyx_GIVEREF(__pyx_t_7);
PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_8, __pyx_t_7);
__pyx_t_7 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
}
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_9 = __Pyx_PyObject_to_MemoryviewSlice_ds_float(__pyx_t_2, PyBUF_WRITABLE); if (unlikely(!__pyx_t_9.memview)) __PYX_ERR(0, 23, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_v_cur_windows = __pyx_t_9;
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":27
* cdef int add_id
* cdef int index
* if t_len < w_len: # <<<<<<<<<<<<<<
* return ans
* for i in range(0, w_len):
*/
__pyx_t_10 = ((__pyx_v_t_len < __pyx_v_w_len) != 0);
if (__pyx_t_10) {
/* "msanomalydetector/_anomaly_kernel_cython.pyx":28
* cdef int index
* if t_len < w_len:
* return ans # <<<<<<<<<<<<<<
* for i in range(0, w_len):
* index = i
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __pyx_memoryview_fromslice(__pyx_v_ans, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 28, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":27
* cdef int add_id
* cdef int index
* if t_len < w_len: # <<<<<<<<<<<<<<
* return ans
* for i in range(0, w_len):
*/
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":29
* if t_len < w_len:
* return ans
* for i in range(0, w_len): # <<<<<<<<<<<<<<
* index = i
* add_id = bisect.bisect_right(cur_windows[:i], val[i])
*/
__pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_w_len); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_INCREF(__pyx_int_0);
__Pyx_GIVEREF(__pyx_int_0);
PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_int_0);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2);
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_range, __pyx_t_5, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
if (likely(PyList_CheckExact(__pyx_t_2)) || PyTuple_CheckExact(__pyx_t_2)) {
__pyx_t_5 = __pyx_t_2; __Pyx_INCREF(__pyx_t_5); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_5 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 29, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_5))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_5)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_2 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_1); __Pyx_INCREF(__pyx_t_2); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 29, __pyx_L1_error)
#else
__pyx_t_2 = PySequence_ITEM(__pyx_t_5, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_5)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_1); __Pyx_INCREF(__pyx_t_2); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 29, __pyx_L1_error)
#else
__pyx_t_2 = PySequence_ITEM(__pyx_t_5, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
#endif
}
} else {
__pyx_t_2 = __pyx_t_6(__pyx_t_5);
if (unlikely(!__pyx_t_2)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 29, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_2);
}
__Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_2);
__pyx_t_2 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":30
* return ans
* for i in range(0, w_len):
* index = i # <<<<<<<<<<<<<<
* add_id = bisect.bisect_right(cur_windows[:i], val[i])
* while index > add_id:
*/
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_v_i); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 30, __pyx_L1_error)
__pyx_v_index = __pyx_t_8;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":31
* for i in range(0, w_len):
* index = i
* add_id = bisect.bisect_right(cur_windows[:i], val[i]) # <<<<<<<<<<<<<<
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1]
*/
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_bisect); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_bisect_right); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 31, __pyx_L1_error)
__pyx_t_9.data = __pyx_v_cur_windows.data;
__pyx_t_9.memview = __pyx_v_cur_windows.memview;
__PYX_INC_MEMVIEW(&__pyx_t_9, 0);
__pyx_t_8 = -1;
if (unlikely(__pyx_memoryview_slice_memviewslice(
&__pyx_t_9,
__pyx_v_cur_windows.shape[0], __pyx_v_cur_windows.strides[0], __pyx_v_cur_windows.suboffsets[0],
0,
0,
&__pyx_t_8,
0,
__pyx_t_11,
0,
0,
1,
0,
1) < 0))
{
__PYX_ERR(0, 31, __pyx_L1_error)
}
__pyx_t_3 = __pyx_memoryview_fromslice(__pyx_t_9, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__PYX_XDEC_MEMVIEW(&__pyx_t_9, 1);
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 31, __pyx_L1_error)
__pyx_t_12 = __pyx_t_11;
__pyx_t_8 = -1;
if (__pyx_t_12 < 0) {
__pyx_t_12 += __pyx_v_val.shape[0];
if (unlikely(__pyx_t_12 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_12 >= __pyx_v_val.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 31, __pyx_L1_error)
}
__pyx_t_4 = PyFloat_FromDouble((*((float *) ( /* dim=0 */ (__pyx_v_val.data + __pyx_t_12 * __pyx_v_val.strides[0]) )))); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_13 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) {
__pyx_t_13 = PyMethod_GET_SELF(__pyx_t_7);
if (likely(__pyx_t_13)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
__Pyx_INCREF(__pyx_t_13);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_7, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_7)) {
PyObject *__pyx_temp[3] = {__pyx_t_13, __pyx_t_3, __pyx_t_4};
__pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) {
PyObject *__pyx_temp[3] = {__pyx_t_13, __pyx_t_3, __pyx_t_4};
__pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
{
__pyx_t_14 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
if (__pyx_t_13) {
__Pyx_GIVEREF(__pyx_t_13); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_13); __pyx_t_13 = NULL;
}
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_14, 0+__pyx_t_8, __pyx_t_3);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_8, __pyx_t_4);
__pyx_t_3 = 0;
__pyx_t_4 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_14, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
}
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 31, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_v_add_id = __pyx_t_8;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":32
* index = i
* add_id = bisect.bisect_right(cur_windows[:i], val[i])
* while index > add_id: # <<<<<<<<<<<<<<
* cur_windows[index] = cur_windows[index - 1]
* index -= 1
*/
while (1) {
__pyx_t_10 = ((__pyx_v_index > __pyx_v_add_id) != 0);
if (!__pyx_t_10) break;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":33
* add_id = bisect.bisect_right(cur_windows[:i], val[i])
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1] # <<<<<<<<<<<<<<
* index -= 1
* cur_windows[add_id] = data[i]
*/
__pyx_t_15 = (__pyx_v_index - 1);
__pyx_t_8 = -1;
if (__pyx_t_15 < 0) {
__pyx_t_15 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_15 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_15 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 33, __pyx_L1_error)
}
__pyx_t_16 = __pyx_v_index;
__pyx_t_8 = -1;
if (__pyx_t_16 < 0) {
__pyx_t_16 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_16 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_16 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 33, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_16 * __pyx_v_cur_windows.strides[0]) )) = (*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_15 * __pyx_v_cur_windows.strides[0]) )));
/* "msanomalydetector/_anomaly_kernel_cython.pyx":34
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1]
* index -= 1 # <<<<<<<<<<<<<<
* cur_windows[add_id] = data[i]
* if i >= w_len // 2 and need_two_end:
*/
__pyx_v_index = (__pyx_v_index - 1);
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":35
* cur_windows[index] = cur_windows[index - 1]
* index -= 1
* cur_windows[add_id] = data[i] # <<<<<<<<<<<<<<
* if i >= w_len // 2 and need_two_end:
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
*/
__pyx_t_2 = __Pyx_PyObject_GetItem(((PyObject *)__pyx_v_data), __pyx_v_i); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 35, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_17 = __pyx_PyFloat_AsFloat(__pyx_t_2); if (unlikely((__pyx_t_17 == (float)-1) && PyErr_Occurred())) __PYX_ERR(0, 35, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_18 = __pyx_v_add_id;
__pyx_t_8 = -1;
if (__pyx_t_18 < 0) {
__pyx_t_18 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_18 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_18 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 35, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_18 * __pyx_v_cur_windows.strides[0]) )) = __pyx_t_17;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":36
* index -= 1
* cur_windows[add_id] = data[i]
* if i >= w_len // 2 and need_two_end: # <<<<<<<<<<<<<<
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
*/
__pyx_t_2 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_w_len, 2)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_7 = PyObject_RichCompare(__pyx_v_i, __pyx_t_2, Py_GE); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 36, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_19 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_19 < 0)) __PYX_ERR(0, 36, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
if (__pyx_t_19) {
} else {
__pyx_t_10 = __pyx_t_19;
goto __pyx_L15_bool_binop_done;
}
__pyx_t_19 = (__pyx_v_need_two_end != 0);
__pyx_t_10 = __pyx_t_19;
__pyx_L15_bool_binop_done:;
if (__pyx_t_10) {
/* "msanomalydetector/_anomaly_kernel_cython.pyx":37
* cur_windows[add_id] = data[i]
* if i >= w_len // 2 and need_two_end:
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1) # <<<<<<<<<<<<<<
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
* for i in range(window // 2 + 1, t_len - window // 2):
*/
__pyx_t_7 = __Pyx_PyInt_AddObjC(__pyx_v_i, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 37, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_7); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 37, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_7 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_w_len, 2)); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 37, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_2 = PyNumber_Subtract(__pyx_v_i, __pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 37, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_t_2); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 37, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_20 = __pyx_t_11;
__pyx_t_21 = -1;
if (__pyx_t_20 < 0) {
__pyx_t_20 += __pyx_v_ans.shape[0];
if (unlikely(__pyx_t_20 < 0)) __pyx_t_21 = 0;
} else if (unlikely(__pyx_t_20 >= __pyx_v_ans.shape[0])) __pyx_t_21 = 0;
if (unlikely(__pyx_t_21 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_21);
__PYX_ERR(0, 37, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_ans.data + __pyx_t_20 * __pyx_v_ans.strides[0]) )) = __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_v_cur_windows, 0, __pyx_t_8, 0);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":36
* index -= 1
* cur_windows[add_id] = data[i]
* if i >= w_len // 2 and need_two_end: # <<<<<<<<<<<<<<
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
*/
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":29
* if t_len < w_len:
* return ans
* for i in range(0, w_len): # <<<<<<<<<<<<<<
* index = i
* add_id = bisect.bisect_right(cur_windows[:i], val[i])
*/
}
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":38
* if i >= w_len // 2 and need_two_end:
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
* ans[window // 2] = sorted_median(cur_windows, 0, w_len) # <<<<<<<<<<<<<<
* for i in range(window // 2 + 1, t_len - window // 2):
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1
*/
__pyx_t_22 = __Pyx_div_long(__pyx_v_window, 2);
__pyx_t_8 = -1;
if (__pyx_t_22 < 0) {
__pyx_t_22 += __pyx_v_ans.shape[0];
if (unlikely(__pyx_t_22 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_22 >= __pyx_v_ans.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 38, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_ans.data + __pyx_t_22 * __pyx_v_ans.strides[0]) )) = __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_v_cur_windows, 0, __pyx_v_w_len, 0);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":39
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
* for i in range(window // 2 + 1, t_len - window // 2): # <<<<<<<<<<<<<<
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1
* index = delete_id
*/
__pyx_t_5 = __Pyx_PyInt_From_long((__Pyx_div_long(__pyx_v_window, 2) + 1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_2 = __Pyx_PyInt_From_long((__pyx_v_t_len - __Pyx_div_long(__pyx_v_window, 2))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_2);
__pyx_t_5 = 0;
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_range, __pyx_t_7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
if (likely(PyList_CheckExact(__pyx_t_2)) || PyTuple_CheckExact(__pyx_t_2)) {
__pyx_t_7 = __pyx_t_2; __Pyx_INCREF(__pyx_t_7); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_6 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 39, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_7))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_7)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_2 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_1); __Pyx_INCREF(__pyx_t_2); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 39, __pyx_L1_error)
#else
__pyx_t_2 = PySequence_ITEM(__pyx_t_7, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_7)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_1); __Pyx_INCREF(__pyx_t_2); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 39, __pyx_L1_error)
#else
__pyx_t_2 = PySequence_ITEM(__pyx_t_7, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
#endif
}
} else {
__pyx_t_2 = __pyx_t_6(__pyx_t_7);
if (unlikely(!__pyx_t_2)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 39, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_2);
}
__Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_2);
__pyx_t_2 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":40
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
* for i in range(window // 2 + 1, t_len - window // 2):
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1 # <<<<<<<<<<<<<<
* index = delete_id
* while index < w_len - 1:
*/
__Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_bisect); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_bisect_right); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_5 = __pyx_memoryview_fromslice(__pyx_v_cur_windows, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_4 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_window, 2)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyNumber_Subtract(__pyx_v_i, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_t_4); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_23 = __pyx_t_11;
__pyx_t_8 = -1;
if (__pyx_t_23 < 0) {
__pyx_t_23 += __pyx_v_val.shape[0];
if (unlikely(__pyx_t_23 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_23 >= __pyx_v_val.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 40, __pyx_L1_error)
}
__pyx_t_4 = PyFloat_FromDouble((*((float *) ( /* dim=0 */ (__pyx_v_val.data + __pyx_t_23 * __pyx_v_val.strides[0]) )))); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_14))) {
__pyx_t_3 = PyMethod_GET_SELF(__pyx_t_14);
if (likely(__pyx_t_3)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14);
__Pyx_INCREF(__pyx_t_3);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_14, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_14)) {
PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_t_5, __pyx_t_4};
__pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) {
PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_t_5, __pyx_t_4};
__pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
} else
#endif
{
__pyx_t_13 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
if (__pyx_t_3) {
__Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_t_3); __pyx_t_3 = NULL;
}
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_13, 0+__pyx_t_8, __pyx_t_5);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_13, 1+__pyx_t_8, __pyx_t_4);
__pyx_t_5 = 0;
__pyx_t_4 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_13, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
}
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_t_14 = __Pyx_PyInt_SubtractObjC(__pyx_t_2, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_14); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 40, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_v_delete_id = __pyx_t_8;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":41
* for i in range(window // 2 + 1, t_len - window // 2):
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1
* index = delete_id # <<<<<<<<<<<<<<
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1]
*/
__pyx_v_index = __pyx_v_delete_id;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":42
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1
* index = delete_id
* while index < w_len - 1: # <<<<<<<<<<<<<<
* cur_windows[index] = cur_windows[index + 1]
* index += 1
*/
while (1) {
__pyx_t_10 = ((__pyx_v_index < (__pyx_v_w_len - 1)) != 0);
if (!__pyx_t_10) break;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":43
* index = delete_id
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1] # <<<<<<<<<<<<<<
* index += 1
*
*/
__pyx_t_24 = (__pyx_v_index + 1);
__pyx_t_8 = -1;
if (__pyx_t_24 < 0) {
__pyx_t_24 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_24 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_24 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 43, __pyx_L1_error)
}
__pyx_t_25 = __pyx_v_index;
__pyx_t_8 = -1;
if (__pyx_t_25 < 0) {
__pyx_t_25 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_25 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_25 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 43, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_25 * __pyx_v_cur_windows.strides[0]) )) = (*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_24 * __pyx_v_cur_windows.strides[0]) )));
/* "msanomalydetector/_anomaly_kernel_cython.pyx":44
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1]
* index += 1 # <<<<<<<<<<<<<<
*
* add_id = bisect.bisect_right(cur_windows[:w_len - 1], val[i + window // 2])
*/
__pyx_v_index = (__pyx_v_index + 1);
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":46
* index += 1
*
* add_id = bisect.bisect_right(cur_windows[:w_len - 1], val[i + window // 2]) # <<<<<<<<<<<<<<
* index = w_len - 1
* while index > add_id:
*/
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_bisect); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_13 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_bisect_right); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_9.data = __pyx_v_cur_windows.data;
__pyx_t_9.memview = __pyx_v_cur_windows.memview;
__PYX_INC_MEMVIEW(&__pyx_t_9, 0);
__pyx_t_8 = -1;
if (unlikely(__pyx_memoryview_slice_memviewslice(
&__pyx_t_9,
__pyx_v_cur_windows.shape[0], __pyx_v_cur_windows.strides[0], __pyx_v_cur_windows.suboffsets[0],
0,
0,
&__pyx_t_8,
0,
(__pyx_v_w_len - 1),
0,
0,
1,
0,
1) < 0))
{
__PYX_ERR(0, 46, __pyx_L1_error)
}
__pyx_t_2 = __pyx_memoryview_fromslice(__pyx_t_9, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__PYX_XDEC_MEMVIEW(&__pyx_t_9, 1);
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
__pyx_t_4 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_window, 2)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_5 = PyNumber_Add(__pyx_v_i, __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_26 = __pyx_t_11;
__pyx_t_8 = -1;
if (__pyx_t_26 < 0) {
__pyx_t_26 += __pyx_v_val.shape[0];
if (unlikely(__pyx_t_26 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_26 >= __pyx_v_val.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 46, __pyx_L1_error)
}
__pyx_t_5 = PyFloat_FromDouble((*((float *) ( /* dim=0 */ (__pyx_v_val.data + __pyx_t_26 * __pyx_v_val.strides[0]) )))); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_4 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_13))) {
__pyx_t_4 = PyMethod_GET_SELF(__pyx_t_13);
if (likely(__pyx_t_4)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_13);
__Pyx_INCREF(__pyx_t_4);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_13, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_13)) {
PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_2, __pyx_t_5};
__pyx_t_14 = __Pyx_PyFunction_FastCall(__pyx_t_13, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_13)) {
PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_2, __pyx_t_5};
__pyx_t_14 = __Pyx_PyCFunction_FastCall(__pyx_t_13, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
} else
#endif
{
__pyx_t_3 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
if (__pyx_t_4) {
__Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL;
}
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_8, __pyx_t_2);
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_8, __pyx_t_5);
__pyx_t_2 = 0;
__pyx_t_5 = 0;
__pyx_t_14 = __Pyx_PyObject_Call(__pyx_t_13, __pyx_t_3, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
}
__Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_14); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 46, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_v_add_id = __pyx_t_8;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":47
*
* add_id = bisect.bisect_right(cur_windows[:w_len - 1], val[i + window // 2])
* index = w_len - 1 # <<<<<<<<<<<<<<
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1]
*/
__pyx_v_index = (__pyx_v_w_len - 1);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":48
* add_id = bisect.bisect_right(cur_windows[:w_len - 1], val[i + window // 2])
* index = w_len - 1
* while index > add_id: # <<<<<<<<<<<<<<
* cur_windows[index] = cur_windows[index - 1]
* index -= 1
*/
while (1) {
__pyx_t_10 = ((__pyx_v_index > __pyx_v_add_id) != 0);
if (!__pyx_t_10) break;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":49
* index = w_len - 1
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1] # <<<<<<<<<<<<<<
* index -= 1
* cur_windows[add_id] = data[i + window // 2]
*/
__pyx_t_27 = (__pyx_v_index - 1);
__pyx_t_8 = -1;
if (__pyx_t_27 < 0) {
__pyx_t_27 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_27 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_27 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 49, __pyx_L1_error)
}
__pyx_t_28 = __pyx_v_index;
__pyx_t_8 = -1;
if (__pyx_t_28 < 0) {
__pyx_t_28 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_28 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_28 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 49, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_28 * __pyx_v_cur_windows.strides[0]) )) = (*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_27 * __pyx_v_cur_windows.strides[0]) )));
/* "msanomalydetector/_anomaly_kernel_cython.pyx":50
* while index > add_id:
* cur_windows[index] = cur_windows[index - 1]
* index -= 1 # <<<<<<<<<<<<<<
* cur_windows[add_id] = data[i + window // 2]
*
*/
__pyx_v_index = (__pyx_v_index - 1);
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":51
* cur_windows[index] = cur_windows[index - 1]
* index -= 1
* cur_windows[add_id] = data[i + window // 2] # <<<<<<<<<<<<<<
*
* ans[i] = sorted_median(cur_windows, 0, w_len)
*/
__pyx_t_14 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_window, 2)); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 51, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__pyx_t_13 = PyNumber_Add(__pyx_v_i, __pyx_t_14); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 51, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_t_14 = __Pyx_PyObject_GetItem(((PyObject *)__pyx_v_data), __pyx_t_13); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 51, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
__pyx_t_17 = __pyx_PyFloat_AsFloat(__pyx_t_14); if (unlikely((__pyx_t_17 == (float)-1) && PyErr_Occurred())) __PYX_ERR(0, 51, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_t_29 = __pyx_v_add_id;
__pyx_t_8 = -1;
if (__pyx_t_29 < 0) {
__pyx_t_29 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_29 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_29 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 51, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_29 * __pyx_v_cur_windows.strides[0]) )) = __pyx_t_17;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":53
* cur_windows[add_id] = data[i + window // 2]
*
* ans[i] = sorted_median(cur_windows, 0, w_len) # <<<<<<<<<<<<<<
*
* if need_two_end:
*/
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 53, __pyx_L1_error)
__pyx_t_30 = __pyx_t_11;
__pyx_t_8 = -1;
if (__pyx_t_30 < 0) {
__pyx_t_30 += __pyx_v_ans.shape[0];
if (unlikely(__pyx_t_30 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_30 >= __pyx_v_ans.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 53, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_ans.data + __pyx_t_30 * __pyx_v_ans.strides[0]) )) = __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_v_cur_windows, 0, __pyx_v_w_len, 0);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":39
* ans[i - w_len // 2] = sorted_median(cur_windows, 0, i + 1)
* ans[window // 2] = sorted_median(cur_windows, 0, w_len)
* for i in range(window // 2 + 1, t_len - window // 2): # <<<<<<<<<<<<<<
* delete_id = bisect.bisect_right(cur_windows, val[i - window // 2 - 1]) - 1
* index = delete_id
*/
}
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":55
* ans[i] = sorted_median(cur_windows, 0, w_len)
*
* if need_two_end: # <<<<<<<<<<<<<<
* for i in range(t_len - window // 2, t_len):
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
*/
__pyx_t_10 = (__pyx_v_need_two_end != 0);
if (__pyx_t_10) {
/* "msanomalydetector/_anomaly_kernel_cython.pyx":56
*
* if need_two_end:
* for i in range(t_len - window // 2, t_len): # <<<<<<<<<<<<<<
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
* index = delete_id
*/
__pyx_t_7 = __Pyx_PyInt_From_long((__pyx_v_t_len - __Pyx_div_long(__pyx_v_window, 2))); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_14 = __Pyx_PyInt_From_int(__pyx_v_t_len); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__pyx_t_13 = PyTuple_New(2); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
__Pyx_GIVEREF(__pyx_t_7);
PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_t_7);
__Pyx_GIVEREF(__pyx_t_14);
PyTuple_SET_ITEM(__pyx_t_13, 1, __pyx_t_14);
__pyx_t_7 = 0;
__pyx_t_14 = 0;
__pyx_t_14 = __Pyx_PyObject_Call(__pyx_builtin_range, __pyx_t_13, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
if (likely(PyList_CheckExact(__pyx_t_14)) || PyTuple_CheckExact(__pyx_t_14)) {
__pyx_t_13 = __pyx_t_14; __Pyx_INCREF(__pyx_t_13); __pyx_t_1 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_1 = -1; __pyx_t_13 = PyObject_GetIter(__pyx_t_14); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
__pyx_t_6 = Py_TYPE(__pyx_t_13)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 56, __pyx_L1_error)
}
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_13))) {
if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_13)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_14 = PyList_GET_ITEM(__pyx_t_13, __pyx_t_1); __Pyx_INCREF(__pyx_t_14); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 56, __pyx_L1_error)
#else
__pyx_t_14 = PySequence_ITEM(__pyx_t_13, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
#endif
} else {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_13)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_14 = PyTuple_GET_ITEM(__pyx_t_13, __pyx_t_1); __Pyx_INCREF(__pyx_t_14); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 56, __pyx_L1_error)
#else
__pyx_t_14 = PySequence_ITEM(__pyx_t_13, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 56, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
#endif
}
} else {
__pyx_t_14 = __pyx_t_6(__pyx_t_13);
if (unlikely(!__pyx_t_14)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(0, 56, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_14);
}
__Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_14);
__pyx_t_14 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":57
* if need_two_end:
* for i in range(t_len - window // 2, t_len):
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1 # <<<<<<<<<<<<<<
* index = delete_id
* while index < w_len - 1:
*/
__Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_bisect); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_bisect_right); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_9.data = __pyx_v_cur_windows.data;
__pyx_t_9.memview = __pyx_v_cur_windows.memview;
__PYX_INC_MEMVIEW(&__pyx_t_9, 0);
__pyx_t_8 = -1;
if (unlikely(__pyx_memoryview_slice_memviewslice(
&__pyx_t_9,
__pyx_v_cur_windows.shape[0], __pyx_v_cur_windows.strides[0], __pyx_v_cur_windows.suboffsets[0],
0,
0,
&__pyx_t_8,
0,
__pyx_v_w_len,
0,
0,
1,
0,
1) < 0))
{
__PYX_ERR(0, 57, __pyx_L1_error)
}
__pyx_t_7 = __pyx_memoryview_fromslice(__pyx_t_9, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__PYX_XDEC_MEMVIEW(&__pyx_t_9, 1);
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
__pyx_t_5 = __Pyx_PyInt_From_long(__Pyx_div_long(__pyx_v_window, 2)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_2 = PyNumber_Subtract(__pyx_v_i, __pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_5 = __Pyx_PyInt_SubtractObjC(__pyx_t_2, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_GetItem(((PyObject *)__pyx_v_data), __pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_5 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
__Pyx_INCREF(__pyx_t_5);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_3, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_3)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_7, __pyx_t_2};
__pyx_t_14 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_7, __pyx_t_2};
__pyx_t_14 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
} else
#endif
{
__pyx_t_4 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
if (__pyx_t_5) {
__Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); __pyx_t_5 = NULL;
}
__Pyx_GIVEREF(__pyx_t_7);
PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_8, __pyx_t_7);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_8, __pyx_t_2);
__pyx_t_7 = 0;
__pyx_t_2 = 0;
__pyx_t_14 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
}
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = __Pyx_PyInt_SubtractObjC(__pyx_t_14, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
__pyx_t_8 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 57, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_v_delete_id = __pyx_t_8;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":58
* for i in range(t_len - window // 2, t_len):
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
* index = delete_id # <<<<<<<<<<<<<<
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1]
*/
__pyx_v_index = __pyx_v_delete_id;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":59
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
* index = delete_id
* while index < w_len - 1: # <<<<<<<<<<<<<<
* cur_windows[index] = cur_windows[index + 1]
* index += 1
*/
while (1) {
__pyx_t_10 = ((__pyx_v_index < (__pyx_v_w_len - 1)) != 0);
if (!__pyx_t_10) break;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":60
* index = delete_id
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1] # <<<<<<<<<<<<<<
* index += 1
* w_len -= 1
*/
__pyx_t_31 = (__pyx_v_index + 1);
__pyx_t_8 = -1;
if (__pyx_t_31 < 0) {
__pyx_t_31 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_31 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_31 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 60, __pyx_L1_error)
}
__pyx_t_32 = __pyx_v_index;
__pyx_t_8 = -1;
if (__pyx_t_32 < 0) {
__pyx_t_32 += __pyx_v_cur_windows.shape[0];
if (unlikely(__pyx_t_32 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_32 >= __pyx_v_cur_windows.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 60, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_32 * __pyx_v_cur_windows.strides[0]) )) = (*((float *) ( /* dim=0 */ (__pyx_v_cur_windows.data + __pyx_t_31 * __pyx_v_cur_windows.strides[0]) )));
/* "msanomalydetector/_anomaly_kernel_cython.pyx":61
* while index < w_len - 1:
* cur_windows[index] = cur_windows[index + 1]
* index += 1 # <<<<<<<<<<<<<<
* w_len -= 1
* ans[i] = sorted_median(cur_windows[: w_len], 0, w_len)
*/
__pyx_v_index = (__pyx_v_index + 1);
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":62
* cur_windows[index] = cur_windows[index + 1]
* index += 1
* w_len -= 1 # <<<<<<<<<<<<<<
* ans[i] = sorted_median(cur_windows[: w_len], 0, w_len)
*
*/
__pyx_v_w_len = (__pyx_v_w_len - 1);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":63
* index += 1
* w_len -= 1
* ans[i] = sorted_median(cur_windows[: w_len], 0, w_len) # <<<<<<<<<<<<<<
*
* return ans
*/
__pyx_t_9.data = __pyx_v_cur_windows.data;
__pyx_t_9.memview = __pyx_v_cur_windows.memview;
__PYX_INC_MEMVIEW(&__pyx_t_9, 0);
__pyx_t_8 = -1;
if (unlikely(__pyx_memoryview_slice_memviewslice(
&__pyx_t_9,
__pyx_v_cur_windows.shape[0], __pyx_v_cur_windows.strides[0], __pyx_v_cur_windows.suboffsets[0],
0,
0,
&__pyx_t_8,
0,
__pyx_v_w_len,
0,
0,
1,
0,
1) < 0))
{
__PYX_ERR(0, 63, __pyx_L1_error)
}
__pyx_t_11 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_11 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L1_error)
__pyx_t_33 = __pyx_t_11;
__pyx_t_8 = -1;
if (__pyx_t_33 < 0) {
__pyx_t_33 += __pyx_v_ans.shape[0];
if (unlikely(__pyx_t_33 < 0)) __pyx_t_8 = 0;
} else if (unlikely(__pyx_t_33 >= __pyx_v_ans.shape[0])) __pyx_t_8 = 0;
if (unlikely(__pyx_t_8 != -1)) {
__Pyx_RaiseBufferIndexError(__pyx_t_8);
__PYX_ERR(0, 63, __pyx_L1_error)
}
*((float *) ( /* dim=0 */ (__pyx_v_ans.data + __pyx_t_33 * __pyx_v_ans.strides[0]) )) = __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_sorted_median(__pyx_t_9, 0, __pyx_v_w_len, 0);
__PYX_XDEC_MEMVIEW(&__pyx_t_9, 1);
__pyx_t_9.memview = NULL;
__pyx_t_9.data = NULL;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":56
*
* if need_two_end:
* for i in range(t_len - window // 2, t_len): # <<<<<<<<<<<<<<
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
* index = delete_id
*/
}
__Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":55
* ans[i] = sorted_median(cur_windows, 0, w_len)
*
* if need_two_end: # <<<<<<<<<<<<<<
* for i in range(t_len - window // 2, t_len):
* delete_id = bisect.bisect_right(cur_windows[: w_len], data[i - window // 2 - 1]) - 1
*/
}
/* "msanomalydetector/_anomaly_kernel_cython.pyx":65
* ans[i] = sorted_median(cur_windows[: w_len], 0, w_len)
*
* return ans # <<<<<<<<<<<<<<
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_13 = __pyx_memoryview_fromslice(__pyx_v_ans, 1, (PyObject *(*)(char *)) __pyx_memview_get_float, (int (*)(char *, PyObject *)) __pyx_memview_set_float, 0);; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 65, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_13);
__pyx_r = __pyx_t_13;
__pyx_t_13 = 0;
goto __pyx_L0;
/* "msanomalydetector/_anomaly_kernel_cython.pyx":18
* return (data[mid - 1] + data[mid])/2
*
* cpdef median_filter(np.ndarray data, int window, bint need_two_end=False): # <<<<<<<<<<<<<<
* cdef int w_len = window // 2 * 2 + 1
* cdef int t_len = len(data)
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_7);
__PYX_XDEC_MEMVIEW(&__pyx_t_9, 1);
__Pyx_XDECREF(__pyx_t_13);
__Pyx_XDECREF(__pyx_t_14);
__Pyx_AddTraceback("msanomalydetector._anomaly_kernel_cython.median_filter", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__PYX_XDEC_MEMVIEW(&__pyx_v_val, 1);
__PYX_XDEC_MEMVIEW(&__pyx_v_ans, 1);
__PYX_XDEC_MEMVIEW(&__pyx_v_cur_windows, 1);
__Pyx_XDECREF(__pyx_v_i);
__Pyx_XDECREF(__pyx_v_x);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Python wrapper */
/*
 * Python-callable entry point for `median_filter(data, window, need_two_end=False)`.
 *
 * Cython-generated argument-unpacking shim: it accepts the raw
 * (*args, **kwargs) pair, resolves the two required parameters (`data`,
 * `window`) and the optional `need_two_end`, type-checks `data` as a
 * numpy.ndarray, and delegates to the generated impl function
 * `__pyx_pf_..._2median_filter`.
 *
 * Returns: new reference to the result object, or NULL with a Python
 * exception set on bad arguments / conversion failure.
 *
 * NOTE(review): machine-generated by Cython — do not hand-edit; regenerate
 * from _anomaly_kernel_cython.pyx instead.
 */
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_3median_filter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static PyObject *__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_3median_filter(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyArrayObject *__pyx_v_data = 0;
int __pyx_v_window;
int __pyx_v_need_two_end;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("median_filter (wrapper)", 0);
{
/* Keyword-name table, terminated by 0; order matches positional order. */
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_data,&__pyx_n_s_window,&__pyx_n_s_need_two_end,0};
PyObject* values[3] = {0,0,0};
if (unlikely(__pyx_kwds)) {
/* Mixed positional + keyword call: first capture positionals
 * (borrowed refs from the args tuple), then fill gaps from kwargs. */
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
CYTHON_FALLTHROUGH;
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
/* The switch resumes at the first slot NOT filled positionally, so each
 * required argument missing from positionals must come from kwargs. */
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
CYTHON_FALLTHROUGH;
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_window)) != 0)) kw_args--;
else {
/* `window` is required (signature takes 2..3 args); report arg #1 missing. */
__Pyx_RaiseArgtupleInvalid("median_filter", 0, 2, 3, 1); __PYX_ERR(0, 18, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 2:
if (kw_args > 0) {
/* `need_two_end` is optional — absence is fine, slot stays NULL. */
PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_need_two_end);
if (value) { values[2] = value; kw_args--; }
}
}
if (unlikely(kw_args > 0)) {
/* Leftover kwargs are either duplicates of positionals or unknown names. */
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "median_filter") < 0)) __PYX_ERR(0, 18, __pyx_L3_error)
}
} else {
/* Fast path: purely positional call with 2 or 3 arguments. */
switch (PyTuple_GET_SIZE(__pyx_args)) {
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
break;
default: goto __pyx_L5_argtuple_error;
}
}
/* Convert captured PyObject* slots to the declared C types. */
__pyx_v_data = ((PyArrayObject *)values[0]);
__pyx_v_window = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_window == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 18, __pyx_L3_error)
if (values[2]) {
__pyx_v_need_two_end = __Pyx_PyObject_IsTrue(values[2]); if (unlikely((__pyx_v_need_two_end == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 18, __pyx_L3_error)
} else {
/* Default from the pyx signature: need_two_end=False. */
__pyx_v_need_two_end = ((int)0);
}
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
/* Wrong number of positional arguments: expected between 2 and 3. */
__Pyx_RaiseArgtupleInvalid("median_filter", 0, 2, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 18, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("msanomalydetector._anomaly_kernel_cython.median_filter", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return NULL;
__pyx_L4_argument_unpacking_done:;
/* Enforce the `np.ndarray data` annotation before dispatch. */
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_data), __pyx_ptype_5numpy_ndarray, 1, "data", 0))) __PYX_ERR(0, 18, __pyx_L1_error)
__pyx_r = __pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_2median_filter(__pyx_self, __pyx_v_data, __pyx_v_window, __pyx_v_need_two_end);
/* function exit code */
goto __pyx_L0;
__pyx_L1_error:;
__pyx_r = NULL;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/*
 * Impl shim between the Python wrapper and the C-level `cpdef median_filter`.
 *
 * Packs the optional `need_two_end` flag into the generated optional-args
 * struct (__pyx_n = 1 marks one optional argument supplied) and calls the
 * C function `__pyx_f_..._median_filter` with __pyx_skip_dispatch = 0.
 *
 * Returns: new reference to the result, or NULL with an exception set.
 *
 * NOTE(review): machine-generated by Cython — regenerate rather than edit.
 */
static PyObject *__pyx_pf_17msanomalydetector_22_anomaly_kernel_cython_2median_filter(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_data, int __pyx_v_window, int __pyx_v_need_two_end) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
struct __pyx_opt_args_17msanomalydetector_22_anomaly_kernel_cython_median_filter __pyx_t_2;
__Pyx_RefNannySetupContext("median_filter", 0);
__Pyx_XDECREF(__pyx_r);
/* One optional argument is being passed explicitly. */
__pyx_t_2.__pyx_n = 1;
__pyx_t_2.need_two_end = __pyx_v_need_two_end;
__pyx_t_1 = __pyx_f_17msanomalydetector_22_anomaly_kernel_cython_median_filter(__pyx_v_data, __pyx_v_window, 0, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("msanomalydetector._anomaly_kernel_cython.median_filter", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":258
* # experimental exception made for __getbuffer__ and __releasebuffer__
* # -- the details of this may change.
* def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<<
* # This implementation of getbuffer is geared towards Cython
* # requirements, and does not yet fulfill the PEP.
*/
/* Python wrapper */
/*
 * Buffer-protocol wrapper for numpy's `ndarray.__getbuffer__`
 * (from Cython's bundled numpy/__init__.pxd).
 *
 * Casts the raw self/info/flags arguments to their declared C types and
 * forwards to the generated impl `__pyx_pf_5numpy_7ndarray___getbuffer__`.
 * Returns 0 on success, -1 on error (buffer-protocol convention).
 *
 * NOTE(review): machine-generated by Cython — regenerate rather than edit.
 */
static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0);
__pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of ndarray.__getbuffer__ (PEP 3118 buffer protocol),
 * generated by Cython from numpy/__init__.pxd -- do not edit by hand.
 * Fills *info (shape, strides, itemsize, format string, owner object) for
 * the given array.  Raises ValueError when a C/Fortran-contiguous buffer is
 * requested but the array is not laid out that way, when the dtype has a
 * non-native byte order, or when the dtype code is unknown.
 * Returns 0 on success, -1 on error (with info->obj cleared). */
static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
  int __pyx_v_i;
  int __pyx_v_ndim;
  int __pyx_v_endian_detector;
  int __pyx_v_little_endian;
  int __pyx_v_t;
  char *__pyx_v_f;
  PyArray_Descr *__pyx_v_descr = 0;
  int __pyx_v_offset;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  PyObject *__pyx_t_3 = NULL;
  int __pyx_t_4;
  int __pyx_t_5;
  int __pyx_t_6;
  PyArray_Descr *__pyx_t_7;
  PyObject *__pyx_t_8 = NULL;
  char *__pyx_t_9;
  /* Reject the obsolete calling convention where the Py_buffer view is NULL
   * before any state is set up, so nothing needs to be cleaned up here. */
  if (__pyx_v_info == NULL) {
    PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete");
    return -1;
  }
  __Pyx_RefNannySetupContext("__getbuffer__", 0);
  /* Initialize info->obj to None so the exit labels can tell whether the
   * real owner (self) was installed yet; replaced with self further down. */
  __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);
  __Pyx_GIVEREF(__pyx_v_info->obj);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":265
 *
 * cdef int i, ndim
 * cdef int endian_detector = 1             # <<<<<<<<<<<<<<
 * cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)
 *
 */
  __pyx_v_endian_detector = 1;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":266
 * cdef int i, ndim
 * cdef int endian_detector = 1
 * cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)             # <<<<<<<<<<<<<<
 *
 * ndim = PyArray_NDIM(self)
 */
  /* Runtime endianness probe: on a little-endian host the low byte of the
   * int 1 is non-zero. */
  __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":268
 * cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)
 *
 * ndim = PyArray_NDIM(self)             # <<<<<<<<<<<<<<
 *
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
 */
  __pyx_v_ndim = PyArray_NDIM(__pyx_v_self);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":270
 * ndim = PyArray_NDIM(self)
 *
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not C contiguous")
 */
  /* Short-circuit "and": if the caller did not ask for C-contiguity the
   * second operand is skipped via the bool_binop label below. */
  __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0);
  if (__pyx_t_2) {
  } else {
    __pyx_t_1 = __pyx_t_2;
    goto __pyx_L4_bool_binop_done;
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":271
 *
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):             # <<<<<<<<<<<<<<
 *         raise ValueError(u"ndarray is not C contiguous")
 *
 */
  __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_C_CONTIGUOUS) != 0)) != 0);
  __pyx_t_1 = __pyx_t_2;
  __pyx_L4_bool_binop_done:;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":270
 * ndim = PyArray_NDIM(self)
 *
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not C contiguous")
 */
  if (unlikely(__pyx_t_1)) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":272
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not C contiguous")             # <<<<<<<<<<<<<<
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
 */
    __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 272, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __Pyx_Raise(__pyx_t_3, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
    __PYX_ERR(1, 272, __pyx_L1_error)

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":270
 * ndim = PyArray_NDIM(self)
 *
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not C contiguous")
 */
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":274
 *         raise ValueError(u"ndarray is not C contiguous")
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not Fortran contiguous")
 */
  /* Same short-circuit pattern for the Fortran-contiguity request. */
  __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0);
  if (__pyx_t_2) {
  } else {
    __pyx_t_1 = __pyx_t_2;
    goto __pyx_L7_bool_binop_done;
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":275
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):             # <<<<<<<<<<<<<<
 *         raise ValueError(u"ndarray is not Fortran contiguous")
 *
 */
  __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_F_CONTIGUOUS) != 0)) != 0);
  __pyx_t_1 = __pyx_t_2;
  __pyx_L7_bool_binop_done:;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":274
 *         raise ValueError(u"ndarray is not C contiguous")
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not Fortran contiguous")
 */
  if (unlikely(__pyx_t_1)) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":276
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not Fortran contiguous")             # <<<<<<<<<<<<<<
 *
 *     info.buf = PyArray_DATA(self)
 */
    __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 276, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __Pyx_Raise(__pyx_t_3, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
    __PYX_ERR(1, 276, __pyx_L1_error)

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":274
 *         raise ValueError(u"ndarray is not C contiguous")
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)             # <<<<<<<<<<<<<<
 *     and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
 *         raise ValueError(u"ndarray is not Fortran contiguous")
 */
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":278
 *         raise ValueError(u"ndarray is not Fortran contiguous")
 *
 *     info.buf = PyArray_DATA(self)             # <<<<<<<<<<<<<<
 *     info.ndim = ndim
 *     if sizeof(npy_intp) != sizeof(Py_ssize_t):
 */
  __pyx_v_info->buf = PyArray_DATA(__pyx_v_self);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":279
 *
 *     info.buf = PyArray_DATA(self)
 *     info.ndim = ndim             # <<<<<<<<<<<<<<
 *     if sizeof(npy_intp) != sizeof(Py_ssize_t):
 *         # Allocate new buffer for strides and shape info.
 */
  __pyx_v_info->ndim = __pyx_v_ndim;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":280
 *     info.buf = PyArray_DATA(self)
 *     info.ndim = ndim
 *     if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<
 *         # Allocate new buffer for strides and shape info.
 *         # This is allocated as one block, strides first.
 */
  /* When npy_intp and Py_ssize_t differ in size the array's own
   * shape/strides arrays cannot be aliased and must be copied element by
   * element into a freshly allocated block (freed in __releasebuffer__). */
  __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
  if (__pyx_t_1) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":283
 *         # Allocate new buffer for strides and shape info.
 *         # This is allocated as one block, strides first.
 *         info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)             # <<<<<<<<<<<<<<
 *         info.shape = info.strides + ndim
 *         for i in range(ndim):
 */
    __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim))));

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":284
 *         # This is allocated as one block, strides first.
 *         info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)
 *         info.shape = info.strides + ndim             # <<<<<<<<<<<<<<
 *         for i in range(ndim):
 *             info.strides[i] = PyArray_STRIDES(self)[i]
 */
    __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim);

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":285
 *         info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)
 *         info.shape = info.strides + ndim
 *         for i in range(ndim):             # <<<<<<<<<<<<<<
 *             info.strides[i] = PyArray_STRIDES(self)[i]
 *             info.shape[i] = PyArray_DIMS(self)[i]
 */
    __pyx_t_4 = __pyx_v_ndim;
    __pyx_t_5 = __pyx_t_4;
    for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) {
      __pyx_v_i = __pyx_t_6;

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":286
 *         info.shape = info.strides + ndim
 *         for i in range(ndim):
 *             info.strides[i] = PyArray_STRIDES(self)[i]             # <<<<<<<<<<<<<<
 *             info.shape[i] = PyArray_DIMS(self)[i]
 *     else:
 */
      (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]);

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":287
 *         for i in range(ndim):
 *             info.strides[i] = PyArray_STRIDES(self)[i]
 *             info.shape[i] = PyArray_DIMS(self)[i]             # <<<<<<<<<<<<<<
 *     else:
 *         info.strides = <Py_ssize_t*>PyArray_STRIDES(self)
 */
      (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]);
    }

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":280
 *     info.buf = PyArray_DATA(self)
 *     info.ndim = ndim
 *     if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<
 *         # Allocate new buffer for strides and shape info.
 *         # This is allocated as one block, strides first.
 */
    goto __pyx_L9;
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":289
 *             info.shape[i] = PyArray_DIMS(self)[i]
 *     else:
 *         info.strides = <Py_ssize_t*>PyArray_STRIDES(self)             # <<<<<<<<<<<<<<
 *         info.shape = <Py_ssize_t*>PyArray_DIMS(self)
 *     info.suboffsets = NULL
 */
  /* Same-size case: alias the array's own strides/dims arrays directly --
   * no allocation, nothing to free on release. */
  /*else*/ {
    __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self));

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":290
 *     else:
 *         info.strides = <Py_ssize_t*>PyArray_STRIDES(self)
 *         info.shape = <Py_ssize_t*>PyArray_DIMS(self)             # <<<<<<<<<<<<<<
 *     info.suboffsets = NULL
 *     info.itemsize = PyArray_ITEMSIZE(self)
 */
    __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self));
  }
  __pyx_L9:;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":291
 *         info.strides = <Py_ssize_t*>PyArray_STRIDES(self)
 *         info.shape = <Py_ssize_t*>PyArray_DIMS(self)
 *     info.suboffsets = NULL             # <<<<<<<<<<<<<<
 *     info.itemsize = PyArray_ITEMSIZE(self)
 *     info.readonly = not PyArray_ISWRITEABLE(self)
 */
  __pyx_v_info->suboffsets = NULL;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":292
 *         info.shape = <Py_ssize_t*>PyArray_DIMS(self)
 *     info.suboffsets = NULL
 *     info.itemsize = PyArray_ITEMSIZE(self)             # <<<<<<<<<<<<<<
 *     info.readonly = not PyArray_ISWRITEABLE(self)
 *
 */
  __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":293
 *     info.suboffsets = NULL
 *     info.itemsize = PyArray_ITEMSIZE(self)
 *     info.readonly = not PyArray_ISWRITEABLE(self)             # <<<<<<<<<<<<<<
 *
 *     cdef int t
 */
  __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0));

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":296
 *
 *     cdef int t
 *     cdef char* f = NULL             # <<<<<<<<<<<<<<
 *     cdef dtype descr = <dtype>PyArray_DESCR(self)
 *     cdef int offset
 */
  __pyx_v_f = NULL;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":297
 *     cdef int t
 *     cdef char* f = NULL
 *     cdef dtype descr = <dtype>PyArray_DESCR(self)             # <<<<<<<<<<<<<<
 *     cdef int offset
 *
 */
  /* Borrowed descriptor is INCREF'd into __pyx_v_descr; released at L2. */
  __pyx_t_7 = PyArray_DESCR(__pyx_v_self);
  __pyx_t_3 = ((PyObject *)__pyx_t_7);
  __Pyx_INCREF(__pyx_t_3);
  __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3);
  __pyx_t_3 = 0;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":300
 *     cdef int offset
 *
 *     info.obj = self             # <<<<<<<<<<<<<<
 *
 *     if not PyDataType_HASFIELDS(descr):
 */
  /* Install the array itself as the buffer owner, replacing the None
   * placeholder set at function entry. */
  __Pyx_INCREF(((PyObject *)__pyx_v_self));
  __Pyx_GIVEREF(((PyObject *)__pyx_v_self));
  __Pyx_GOTREF(__pyx_v_info->obj);
  __Pyx_DECREF(__pyx_v_info->obj);
  __pyx_v_info->obj = ((PyObject *)__pyx_v_self);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":302
 *     info.obj = self
 *
 *     if not PyDataType_HASFIELDS(descr):             # <<<<<<<<<<<<<<
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or
 */
  /* Simple (field-less) dtypes get a static one/two-char format string;
   * structured dtypes fall through to the else branch further down, which
   * builds the format string dynamically via _util_dtypestring. */
  __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0);
  if (__pyx_t_1) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":303
 *
 *     if not PyDataType_HASFIELDS(descr):
 *         t = descr.type_num             # <<<<<<<<<<<<<<
 *         if ((descr.byteorder == c'>' and little_endian) or
 *             (descr.byteorder == c'<' and not little_endian)):
 */
    __pyx_t_4 = __pyx_v_descr->type_num;
    __pyx_v_t = __pyx_t_4;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":304
 *     if not PyDataType_HASFIELDS(descr):
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<
 *             (descr.byteorder == c'<' and not little_endian)):
 *                 raise ValueError(u"Non-native byte order not supported")
 */
    /* Reject dtypes whose explicit byte order disagrees with the host. */
    __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0);
    if (!__pyx_t_2) {
      goto __pyx_L15_next_or;
    } else {
    }
    __pyx_t_2 = (__pyx_v_little_endian != 0);
    if (!__pyx_t_2) {
    } else {
      __pyx_t_1 = __pyx_t_2;
      goto __pyx_L14_bool_binop_done;
    }
    __pyx_L15_next_or:;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":305
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or
 *             (descr.byteorder == c'<' and not little_endian)):             # <<<<<<<<<<<<<<
 *                 raise ValueError(u"Non-native byte order not supported")
 *             if t == NPY_BYTE:        f = "b"
 */
    __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0);
    if (__pyx_t_2) {
    } else {
      __pyx_t_1 = __pyx_t_2;
      goto __pyx_L14_bool_binop_done;
    }
    __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0);
    __pyx_t_1 = __pyx_t_2;
    __pyx_L14_bool_binop_done:;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":304
 *     if not PyDataType_HASFIELDS(descr):
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<
 *             (descr.byteorder == c'<' and not little_endian)):
 *                 raise ValueError(u"Non-native byte order not supported")
 */
    if (unlikely(__pyx_t_1)) {

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":306
 *         if ((descr.byteorder == c'>' and little_endian) or
 *             (descr.byteorder == c'<' and not little_endian)):
 *                 raise ValueError(u"Non-native byte order not supported")             # <<<<<<<<<<<<<<
 *             if t == NPY_BYTE:        f = "b"
 *             elif t == NPY_UBYTE:     f = "B"
 */
      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 306, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_3);
      __Pyx_Raise(__pyx_t_3, 0, 0, 0);
      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
      __PYX_ERR(1, 306, __pyx_L1_error)

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":304
 *     if not PyDataType_HASFIELDS(descr):
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<
 *             (descr.byteorder == c'<' and not little_endian)):
 *                 raise ValueError(u"Non-native byte order not supported")
 */
    }

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":307
 *             (descr.byteorder == c'<' and not little_endian)):
 *                 raise ValueError(u"Non-native byte order not supported")
 *             if t == NPY_BYTE:        f = "b"             # <<<<<<<<<<<<<<
 *             elif t == NPY_UBYTE:     f = "B"
 *             elif t == NPY_SHORT:     f = "h"
 */
    /* Map the NumPy type number to its PEP 3118 struct-format character;
     * unknown type numbers raise ValueError in the default branch. */
    switch (__pyx_v_t) {
      case NPY_BYTE:
      __pyx_v_f = ((char *)"b");
      break;
      case NPY_UBYTE:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":308
 *                 raise ValueError(u"Non-native byte order not supported")
 *             if t == NPY_BYTE:        f = "b"
 *             elif t == NPY_UBYTE:     f = "B"             # <<<<<<<<<<<<<<
 *             elif t == NPY_SHORT:     f = "h"
 *             elif t == NPY_USHORT:    f = "H"
 */
      __pyx_v_f = ((char *)"B");
      break;
      case NPY_SHORT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":309
 *             if t == NPY_BYTE:        f = "b"
 *             elif t == NPY_UBYTE:     f = "B"
 *             elif t == NPY_SHORT:     f = "h"             # <<<<<<<<<<<<<<
 *             elif t == NPY_USHORT:    f = "H"
 *             elif t == NPY_INT:       f = "i"
 */
      __pyx_v_f = ((char *)"h");
      break;
      case NPY_USHORT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":310
 *             elif t == NPY_UBYTE:     f = "B"
 *             elif t == NPY_SHORT:     f = "h"
 *             elif t == NPY_USHORT:    f = "H"             # <<<<<<<<<<<<<<
 *             elif t == NPY_INT:       f = "i"
 *             elif t == NPY_UINT:      f = "I"
 */
      __pyx_v_f = ((char *)"H");
      break;
      case NPY_INT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":311
 *             elif t == NPY_SHORT:     f = "h"
 *             elif t == NPY_USHORT:    f = "H"
 *             elif t == NPY_INT:       f = "i"             # <<<<<<<<<<<<<<
 *             elif t == NPY_UINT:      f = "I"
 *             elif t == NPY_LONG:      f = "l"
 */
      __pyx_v_f = ((char *)"i");
      break;
      case NPY_UINT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":312
 *             elif t == NPY_USHORT:    f = "H"
 *             elif t == NPY_INT:       f = "i"
 *             elif t == NPY_UINT:      f = "I"             # <<<<<<<<<<<<<<
 *             elif t == NPY_LONG:      f = "l"
 *             elif t == NPY_ULONG:     f = "L"
 */
      __pyx_v_f = ((char *)"I");
      break;
      case NPY_LONG:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":313
 *             elif t == NPY_INT:       f = "i"
 *             elif t == NPY_UINT:      f = "I"
 *             elif t == NPY_LONG:      f = "l"             # <<<<<<<<<<<<<<
 *             elif t == NPY_ULONG:     f = "L"
 *             elif t == NPY_LONGLONG:  f = "q"
 */
      __pyx_v_f = ((char *)"l");
      break;
      case NPY_ULONG:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":314
 *             elif t == NPY_UINT:      f = "I"
 *             elif t == NPY_LONG:      f = "l"
 *             elif t == NPY_ULONG:     f = "L"             # <<<<<<<<<<<<<<
 *             elif t == NPY_LONGLONG:  f = "q"
 *             elif t == NPY_ULONGLONG: f = "Q"
 */
      __pyx_v_f = ((char *)"L");
      break;
      case NPY_LONGLONG:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":315
 *             elif t == NPY_LONG:      f = "l"
 *             elif t == NPY_ULONG:     f = "L"
 *             elif t == NPY_LONGLONG:  f = "q"             # <<<<<<<<<<<<<<
 *             elif t == NPY_ULONGLONG: f = "Q"
 *             elif t == NPY_FLOAT:     f = "f"
 */
      __pyx_v_f = ((char *)"q");
      break;
      case NPY_ULONGLONG:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":316
 *             elif t == NPY_ULONG:     f = "L"
 *             elif t == NPY_LONGLONG:  f = "q"
 *             elif t == NPY_ULONGLONG: f = "Q"             # <<<<<<<<<<<<<<
 *             elif t == NPY_FLOAT:     f = "f"
 *             elif t == NPY_DOUBLE:    f = "d"
 */
      __pyx_v_f = ((char *)"Q");
      break;
      case NPY_FLOAT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":317
 *             elif t == NPY_LONGLONG:  f = "q"
 *             elif t == NPY_ULONGLONG: f = "Q"
 *             elif t == NPY_FLOAT:     f = "f"             # <<<<<<<<<<<<<<
 *             elif t == NPY_DOUBLE:    f = "d"
 *             elif t == NPY_LONGDOUBLE: f = "g"
 */
      __pyx_v_f = ((char *)"f");
      break;
      case NPY_DOUBLE:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":318
 *             elif t == NPY_ULONGLONG: f = "Q"
 *             elif t == NPY_FLOAT:     f = "f"
 *             elif t == NPY_DOUBLE:    f = "d"             # <<<<<<<<<<<<<<
 *             elif t == NPY_LONGDOUBLE: f = "g"
 *             elif t == NPY_CFLOAT:    f = "Zf"
 */
      __pyx_v_f = ((char *)"d");
      break;
      case NPY_LONGDOUBLE:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":319
 *             elif t == NPY_FLOAT:     f = "f"
 *             elif t == NPY_DOUBLE:    f = "d"
 *             elif t == NPY_LONGDOUBLE: f = "g"             # <<<<<<<<<<<<<<
 *             elif t == NPY_CFLOAT:    f = "Zf"
 *             elif t == NPY_CDOUBLE:   f = "Zd"
 */
      __pyx_v_f = ((char *)"g");
      break;
      case NPY_CFLOAT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":320
 *             elif t == NPY_DOUBLE:    f = "d"
 *             elif t == NPY_LONGDOUBLE: f = "g"
 *             elif t == NPY_CFLOAT:    f = "Zf"             # <<<<<<<<<<<<<<
 *             elif t == NPY_CDOUBLE:   f = "Zd"
 *             elif t == NPY_CLONGDOUBLE: f = "Zg"
 */
      __pyx_v_f = ((char *)"Zf");
      break;
      case NPY_CDOUBLE:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":321
 *             elif t == NPY_LONGDOUBLE: f = "g"
 *             elif t == NPY_CFLOAT:    f = "Zf"
 *             elif t == NPY_CDOUBLE:   f = "Zd"             # <<<<<<<<<<<<<<
 *             elif t == NPY_CLONGDOUBLE: f = "Zg"
 *             elif t == NPY_OBJECT:    f = "O"
 */
      __pyx_v_f = ((char *)"Zd");
      break;
      case NPY_CLONGDOUBLE:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":322
 *             elif t == NPY_CFLOAT:    f = "Zf"
 *             elif t == NPY_CDOUBLE:   f = "Zd"
 *             elif t == NPY_CLONGDOUBLE: f = "Zg"             # <<<<<<<<<<<<<<
 *             elif t == NPY_OBJECT:    f = "O"
 *             else:
 */
      __pyx_v_f = ((char *)"Zg");
      break;
      case NPY_OBJECT:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":323
 *             elif t == NPY_CDOUBLE:   f = "Zd"
 *             elif t == NPY_CLONGDOUBLE: f = "Zg"
 *             elif t == NPY_OBJECT:    f = "O"             # <<<<<<<<<<<<<<
 *             else:
 *                 raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
 */
      __pyx_v_f = ((char *)"O");
      break;
      default:

      /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":325
 *             elif t == NPY_OBJECT:    f = "O"
 *             else:
 *                 raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)             # <<<<<<<<<<<<<<
 *             info.format = f
 *             return
 */
      __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_3);
      __pyx_t_8 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 325, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_8);
      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
      __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_3);
      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
      __Pyx_Raise(__pyx_t_3, 0, 0, 0);
      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
      __PYX_ERR(1, 325, __pyx_L1_error)
      break;
    }

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":326
 *             else:
 *                 raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
 *             info.format = f             # <<<<<<<<<<<<<<
 *             return
 *         else:
 */
    __pyx_v_info->format = __pyx_v_f;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":327
 *                 raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
 *             info.format = f
 *             return             # <<<<<<<<<<<<<<
 *         else:
 *             info.format = <char*>PyObject_Malloc(_buffer_format_string_len)
 */
    __pyx_r = 0;
    goto __pyx_L0;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":302
 *     info.obj = self
 *
 *     if not PyDataType_HASFIELDS(descr):             # <<<<<<<<<<<<<<
 *         t = descr.type_num
 *         if ((descr.byteorder == c'>' and little_endian) or
 */
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":329
 *             return
 *         else:
 *             info.format = <char*>PyObject_Malloc(_buffer_format_string_len)             # <<<<<<<<<<<<<<
 *             info.format[0] = c'^' # Native data types, manual alignment
 *             offset = 0
 */
  /* Structured dtype: allocate a 255-byte format buffer (freed in
   * __releasebuffer__), mark native alignment with '^', then let
   * _util_dtypestring write the per-field format codes after it. */
  /*else*/ {
    __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF));

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":330
 *         else:
 *             info.format = <char*>PyObject_Malloc(_buffer_format_string_len)
 *             info.format[0] = c'^' # Native data types, manual alignment             # <<<<<<<<<<<<<<
 *             offset = 0
 *             f = _util_dtypestring(descr, info.format + 1,
 */
    (__pyx_v_info->format[0]) = '^';

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":331
 *             info.format = <char*>PyObject_Malloc(_buffer_format_string_len)
 *             info.format[0] = c'^' # Native data types, manual alignment
 *             offset = 0             # <<<<<<<<<<<<<<
 *             f = _util_dtypestring(descr, info.format + 1,
 *                                   info.format + _buffer_format_string_len,
 */
    __pyx_v_offset = 0;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":332
 *             info.format[0] = c'^' # Native data types, manual alignment
 *             offset = 0
 *             f = _util_dtypestring(descr, info.format + 1,             # <<<<<<<<<<<<<<
 *                                   info.format + _buffer_format_string_len,
 *                                   &offset)
 */
    __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 332, __pyx_L1_error)
    __pyx_v_f = __pyx_t_9;

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":335
 *                                   info.format + _buffer_format_string_len,
 *                                   &offset)
 *             f[0] = c'\0' # Terminate format string             # <<<<<<<<<<<<<<
 *
 *         def __releasebuffer__(ndarray self, Py_buffer* info):
 */
    (__pyx_v_f[0]) = '\x00';
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":258
 * # experimental exception made for __getbuffer__ and __releasebuffer__
 * # -- the details of this may change.
 *     def __getbuffer__(ndarray self, Py_buffer* info, int flags):             # <<<<<<<<<<<<<<
 *         # This implementation of getbuffer is geared towards Cython
 *         # requirements, and does not yet fulfill the PEP.
 */

  /* function exit code */
  __pyx_r = 0;
  goto __pyx_L0;
  /* Error path: drop temporaries, record the traceback, return -1, and
   * clear info->obj so the caller sees a failed getbuffer with no owner. */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_8);
  __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  if (__pyx_v_info->obj != NULL) {
    __Pyx_GOTREF(__pyx_v_info->obj);
    __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
  }
  goto __pyx_L2;
  /* Success path: if the None placeholder was never replaced by self,
   * release it so info->obj is not left pointing at None. */
  __pyx_L0:;
  if (__pyx_v_info->obj == Py_None) {
    __Pyx_GOTREF(__pyx_v_info->obj);
    __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
  }
  __pyx_L2:;
  __Pyx_XDECREF((PyObject *)__pyx_v_descr);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":337
* f[0] = c'\0' # Terminate format string
*
* def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<<
* if PyArray_HASFIELDS(self):
* PyObject_Free(info.format)
*/
/* Python wrapper */
/* Python-level wrapper for ndarray.__releasebuffer__ (releasebufferproc).
 * Generated by Cython -- do not edit by hand.  Casts the generic arguments
 * and delegates to __pyx_pf_5numpy_7ndarray_2__releasebuffer__; the slot
 * returns void, so there is no result to propagate. */
static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/
static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0);
  __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* Implementation of ndarray.__releasebuffer__, generated by Cython from
 * numpy/__init__.pxd -- do not edit by hand.  Frees only what the matching
 * __getbuffer__ allocated: the dynamically built format string (structured
 * dtypes only) and the copied strides/shape block (only when npy_intp and
 * Py_ssize_t differ in size); everything else in *info aliases the array. */
static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) {
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  __Pyx_RefNannySetupContext("__releasebuffer__", 0);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":338
 *
 *         def __releasebuffer__(ndarray self, Py_buffer* info):
 *             if PyArray_HASFIELDS(self):             # <<<<<<<<<<<<<<
 *                 PyObject_Free(info.format)
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):
 */
  /* The format buffer was PyObject_Malloc'd only for structured dtypes;
   * simple dtypes point at a static string that must not be freed. */
  __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0);
  if (__pyx_t_1) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":339
 *         def __releasebuffer__(ndarray self, Py_buffer* info):
 *             if PyArray_HASFIELDS(self):
 *                 PyObject_Free(info.format)             # <<<<<<<<<<<<<<
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):
 *                 PyObject_Free(info.strides)
 */
    PyObject_Free(__pyx_v_info->format);

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":338
 *
 *         def __releasebuffer__(ndarray self, Py_buffer* info):
 *             if PyArray_HASFIELDS(self):             # <<<<<<<<<<<<<<
 *                 PyObject_Free(info.format)
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):
 */
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":340
 *             if PyArray_HASFIELDS(self):
 *                 PyObject_Free(info.format)
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<
 *                 PyObject_Free(info.strides)
 *                 # info.shape was stored after info.strides in the same block
 */
  /* Strides/shape were copied into one malloc'd block (strides first) only
   * in the size-mismatch case; freeing strides releases both. */
  __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);
  if (__pyx_t_1) {

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":341
 *                 PyObject_Free(info.format)
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):
 *                 PyObject_Free(info.strides)             # <<<<<<<<<<<<<<
 *                 # info.shape was stored after info.strides in the same block
 *
 */
    PyObject_Free(__pyx_v_info->strides);

    /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":340
 *             if PyArray_HASFIELDS(self):
 *                 PyObject_Free(info.format)
 *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<
 *                 PyObject_Free(info.strides)
 *                 # info.shape was stored after info.strides in the same block
 */
  }

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":337
 *             f[0] = c'\0' # Terminate format string
 *
 *         def __releasebuffer__(ndarray self, Py_buffer* info):             # <<<<<<<<<<<<<<
 *             if PyArray_HASFIELDS(self):
 *                 PyObject_Free(info.format)
 */

  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":821
* ctypedef npy_cdouble complex_t
*
* cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<<
* return PyArray_MultiIterNew(1, <void*>a)
*
*/
/* Inline helper generated by Cython from numpy/__init__.pxd's
 * `cdef inline object PyArray_MultiIterNew1(a)`: creates a NumPy
 * broadcasting multi-iterator over a single object via the C-API variadic
 * PyArray_MultiIterNew.  Returns a new reference, or 0 with a Python
 * exception set on failure. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":822
 *
 * cdef inline object PyArray_MultiIterNew1(a):
 *     return PyArray_MultiIterNew(1, <void*>a)             # <<<<<<<<<<<<<<
 *
 * cdef inline object PyArray_MultiIterNew2(a, b):
 */
  __Pyx_XDECREF(__pyx_r);
  __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 822, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;
  __pyx_t_1 = 0;
  goto __pyx_L0;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":821
 * ctypedef npy_cdouble complex_t
 *
 * cdef inline object PyArray_MultiIterNew1(a):             # <<<<<<<<<<<<<<
 *     return PyArray_MultiIterNew(1, <void*>a)
 *
 */

  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":824
* return PyArray_MultiIterNew(1, <void*>a)
*
* cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<<
* return PyArray_MultiIterNew(2, <void*>a, <void*>b)
*
*/
/* Inline helper generated by Cython from numpy/__init__.pxd's
 * `cdef inline object PyArray_MultiIterNew2(a, b)`: creates a NumPy
 * broadcasting multi-iterator over two objects via PyArray_MultiIterNew.
 * Returns a new reference, or 0 with a Python exception set on failure. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0);

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":825
 *
 * cdef inline object PyArray_MultiIterNew2(a, b):
 *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)             # <<<<<<<<<<<<<<
 *
 * cdef inline object PyArray_MultiIterNew3(a, b, c):
 */
  __Pyx_XDECREF(__pyx_r);
  __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 825, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;
  __pyx_t_1 = 0;
  goto __pyx_L0;

  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":824
 *     return PyArray_MultiIterNew(1, <void*>a)
 *
 * cdef inline object PyArray_MultiIterNew2(a, b):             # <<<<<<<<<<<<<<
 *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)
 *
 */

  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":827
 * return PyArray_MultiIterNew(2, <void*>a, <void*>b)
 *
 * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)
 *
 */

/* Cython-generated wrapper for `PyArray_MultiIterNew3(a, b, c)`: identical in
 * structure to the 2-argument variant above, forwarding three operands to
 * PyArray_MultiIterNew(3, ...).  Returns the object produced by that call, or
 * 0 (NULL) after recording a traceback on failure.
 * NOTE: machine-generated code — do not edit by hand. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) {
  PyObject *__pyx_r = NULL;       /* function result slot */
  __Pyx_RefNannyDeclarations      /* refcount-debugging bookkeeping */
  PyObject *__pyx_t_1 = NULL;     /* scratch temporary */
  __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0);
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":828
 *
 * cdef inline object PyArray_MultiIterNew3(a, b, c):
 * return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c) # <<<<<<<<<<<<<<
 *
 * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
 */
  __Pyx_XDECREF(__pyx_r);
  /* void* casts match the varargs declaration of PyArray_MultiIterNew. */
  __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 828, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;            /* hand the new object to the result slot */
  __pyx_t_1 = 0;
  goto __pyx_L0;
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":827
 * return PyArray_MultiIterNew(2, <void*>a, <void*>b)
 *
 * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)
 *
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);       /* release the temporary on the error path */
  __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;                    /* NULL result signals failure */
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":830
 * return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)
 *
 * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)
 *
 */

/* Cython-generated wrapper for `PyArray_MultiIterNew4(a, b, c, d)`: forwards
 * four operands to PyArray_MultiIterNew(4, ...) and propagates its result, or
 * returns 0 (NULL) after recording a traceback on failure.
 * NOTE: machine-generated code — do not edit by hand. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) {
  PyObject *__pyx_r = NULL;       /* function result slot */
  __Pyx_RefNannyDeclarations      /* refcount-debugging bookkeeping */
  PyObject *__pyx_t_1 = NULL;     /* scratch temporary */
  __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0);
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":831
 *
 * cdef inline object PyArray_MultiIterNew4(a, b, c, d):
 * return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d) # <<<<<<<<<<<<<<
 *
 * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
 */
  __Pyx_XDECREF(__pyx_r);
  /* void* casts match the varargs declaration of PyArray_MultiIterNew. */
  __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 831, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;            /* hand the new object to the result slot */
  __pyx_t_1 = 0;
  goto __pyx_L0;
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":830
 * return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)
 *
 * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)
 *
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);       /* release the temporary on the error path */
  __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;                    /* NULL result signals failure */
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":833
 * return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)
 *
 * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
 *
 */

/* Cython-generated wrapper for `PyArray_MultiIterNew5(a, b, c, d, e)`: the
 * largest of the fixed-arity family above; forwards five operands to
 * PyArray_MultiIterNew(5, ...) and propagates its result, or returns 0 (NULL)
 * after recording a traceback on failure.
 * NOTE: machine-generated code — do not edit by hand. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) {
  PyObject *__pyx_r = NULL;       /* function result slot */
  __Pyx_RefNannyDeclarations      /* refcount-debugging bookkeeping */
  PyObject *__pyx_t_1 = NULL;     /* scratch temporary */
  __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0);
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":834
 *
 * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):
 * return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e) # <<<<<<<<<<<<<<
 *
 * cdef inline tuple PyDataType_SHAPE(dtype d):
 */
  __Pyx_XDECREF(__pyx_r);
  /* void* casts match the varargs declaration of PyArray_MultiIterNew. */
  __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 834, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_r = __pyx_t_1;            /* hand the new object to the result slot */
  __pyx_t_1 = 0;
  goto __pyx_L0;
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":833
 * return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)
 *
 * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<<
 * return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
 *
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);       /* release the temporary on the error path */
  __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;                    /* NULL result signals failure */
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":836
 * return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
 *
 * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
 * if PyDataType_HASSUBARRAY(d):
 * return <tuple>d.subarray.shape
 */

/* Cython-generated body of the .pxd inline `PyDataType_SHAPE(dtype d)`.
 * If the descriptor has a sub-array (PyDataType_HASSUBARRAY), returns a new
 * reference to `d->subarray->shape`; otherwise returns the shared empty tuple
 * singleton (`__pyx_empty_tuple`), incref'd.  This function cannot fail: there
 * is no error label, only the __pyx_L0 exit.
 * NOTE: machine-generated code — do not edit by hand. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) {
  PyObject *__pyx_r = NULL;       /* function result slot */
  __Pyx_RefNannyDeclarations      /* refcount-debugging bookkeeping */
  int __pyx_t_1;                  /* truth value of the HASSUBARRAY test */
  __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0);
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":837
 *
 * cdef inline tuple PyDataType_SHAPE(dtype d):
 * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<<
 * return <tuple>d.subarray.shape
 * else:
 */
  __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0);
  if (__pyx_t_1) {
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":838
 * cdef inline tuple PyDataType_SHAPE(dtype d):
 * if PyDataType_HASSUBARRAY(d):
 * return <tuple>d.subarray.shape # <<<<<<<<<<<<<<
 * else:
 * return ()
 */
    __Pyx_XDECREF(__pyx_r);
    /* The shape tuple is owned by the descriptor; take our own reference
     * before handing it out. */
    __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape));
    __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape);
    goto __pyx_L0;
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":837
 *
 * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
 * if PyDataType_HASSUBARRAY(d):
 * return <tuple>d.subarray.shape
 * else:
 */
  }
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":840
 * return <tuple>d.subarray.shape
 * else:
 * return () # <<<<<<<<<<<<<<
 *
 * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:
 */
  /*else*/ {
    __Pyx_XDECREF(__pyx_r);
    /* `()` compiles to the module-wide empty-tuple singleton; incref it so the
     * caller receives an owned reference either way. */
    __Pyx_INCREF(__pyx_empty_tuple);
    __pyx_r = __pyx_empty_tuple;
    goto __pyx_L0;
  }
  /* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":836
 * return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)
 *
 * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<<
 * if PyDataType_HASSUBARRAY(d):
 * return <tuple>d.subarray.shape
 */
  /* function exit code */
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":842
* return ()
*
* cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
* # Recursive utility function used in __getbuffer__ to get format
* # string. The new location in the format string is returned.
*/
static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) {
PyArray_Descr *__pyx_v_child = 0;
int __pyx_v_endian_detector;
int __pyx_v_little_endian;
PyObject *__pyx_v_fields = 0;
PyObject *__pyx_v_childname = NULL;
PyObject *__pyx_v_new_offset = NULL;
PyObject *__pyx_v_t = NULL;
char *__pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
Py_ssize_t __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
int __pyx_t_6;
int __pyx_t_7;
long __pyx_t_8;
char *__pyx_t_9;
__Pyx_RefNannySetupContext("_util_dtypestring", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":847
*
* cdef dtype child
* cdef int endian_detector = 1 # <<<<<<<<<<<<<<
* cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)
* cdef tuple fields
*/
__pyx_v_endian_detector = 1;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":848
* cdef dtype child
* cdef int endian_detector = 1
* cdef bint little_endian = ((<char*>&endian_detector)[0] != 0) # <<<<<<<<<<<<<<
* cdef tuple fields
*
*/
__pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":851
* cdef tuple fields
*
* for childname in descr.names: # <<<<<<<<<<<<<<
* fields = descr.fields[childname]
* child, new_offset = fields
*/
if (unlikely(__pyx_v_descr->names == Py_None)) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
__PYX_ERR(1, 851, __pyx_L1_error)
}
__pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;
for (;;) {
if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(1, 851, __pyx_L1_error)
#else
__pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 851, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
#endif
__Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3);
__pyx_t_3 = 0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":852
*
* for childname in descr.names:
* fields = descr.fields[childname] # <<<<<<<<<<<<<<
* child, new_offset = fields
*
*/
if (unlikely(__pyx_v_descr->fields == Py_None)) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
__PYX_ERR(1, 852, __pyx_L1_error)
}
__pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 852, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(1, 852, __pyx_L1_error)
__Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3));
__pyx_t_3 = 0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":853
* for childname in descr.names:
* fields = descr.fields[childname]
* child, new_offset = fields # <<<<<<<<<<<<<<
*
* if (end - f) - <int>(new_offset - offset[0]) < 15:
*/
if (likely(__pyx_v_fields != Py_None)) {
PyObject* sequence = __pyx_v_fields;
Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
if (unlikely(size != 2)) {
if (size > 2) __Pyx_RaiseTooManyValuesError(2);
else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
__PYX_ERR(1, 853, __pyx_L1_error)
}
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_3 = PyTuple_GET_ITEM(sequence, 0);
__pyx_t_4 = PyTuple_GET_ITEM(sequence, 1);
__Pyx_INCREF(__pyx_t_3);
__Pyx_INCREF(__pyx_t_4);
#else
__pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 853, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 853, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
#endif
} else {
__Pyx_RaiseNoneNotIterableError(); __PYX_ERR(1, 853, __pyx_L1_error)
}
if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(1, 853, __pyx_L1_error)
__Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3));
__pyx_t_3 = 0;
__Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4);
__pyx_t_4 = 0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":855
* child, new_offset = fields
*
* if (end - f) - <int>(new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
*
*/
__pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 855, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 855, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 855, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0);
if (unlikely(__pyx_t_6)) {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":856
*
* if (end - f) - <int>(new_offset - offset[0]) < 15:
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<<
*
* if ((child.byteorder == c'>' and little_endian) or
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 856, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(1, 856, __pyx_L1_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":855
* child, new_offset = fields
*
* if (end - f) - <int>(new_offset - offset[0]) < 15: # <<<<<<<<<<<<<<
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
*
*/
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":858
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
*
* if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
* (child.byteorder == c'<' and not little_endian)):
* raise ValueError(u"Non-native byte order not supported")
*/
__pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0);
if (!__pyx_t_7) {
goto __pyx_L8_next_or;
} else {
}
__pyx_t_7 = (__pyx_v_little_endian != 0);
if (!__pyx_t_7) {
} else {
__pyx_t_6 = __pyx_t_7;
goto __pyx_L7_bool_binop_done;
}
__pyx_L8_next_or:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":859
*
* if ((child.byteorder == c'>' and little_endian) or
* (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<<
* raise ValueError(u"Non-native byte order not supported")
* # One could encode it in the format string and have Cython
*/
__pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0);
if (__pyx_t_7) {
} else {
__pyx_t_6 = __pyx_t_7;
goto __pyx_L7_bool_binop_done;
}
__pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0);
__pyx_t_6 = __pyx_t_7;
__pyx_L7_bool_binop_done:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":858
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
*
* if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
* (child.byteorder == c'<' and not little_endian)):
* raise ValueError(u"Non-native byte order not supported")
*/
if (unlikely(__pyx_t_6)) {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":860
* if ((child.byteorder == c'>' and little_endian) or
* (child.byteorder == c'<' and not little_endian)):
* raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
* # One could encode it in the format string and have Cython
* # complain instead, BUT: < and > in format strings also imply
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 860, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(1, 860, __pyx_L1_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":858
* raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd")
*
* if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<<
* (child.byteorder == c'<' and not little_endian)):
* raise ValueError(u"Non-native byte order not supported")
*/
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":870
*
* # Output padding bytes
* while offset[0] < new_offset: # <<<<<<<<<<<<<<
* f[0] = 120 # "x"; pad byte
* f += 1
*/
while (1) {
__pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 870, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 870, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 870, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (!__pyx_t_6) break;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":871
* # Output padding bytes
* while offset[0] < new_offset:
* f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<<
* f += 1
* offset[0] += 1
*/
(__pyx_v_f[0]) = 0x78;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":872
* while offset[0] < new_offset:
* f[0] = 120 # "x"; pad byte
* f += 1 # <<<<<<<<<<<<<<
* offset[0] += 1
*
*/
__pyx_v_f = (__pyx_v_f + 1);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":873
* f[0] = 120 # "x"; pad byte
* f += 1
* offset[0] += 1 # <<<<<<<<<<<<<<
*
* offset[0] += child.itemsize
*/
__pyx_t_8 = 0;
(__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1);
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":875
* offset[0] += 1
*
* offset[0] += child.itemsize # <<<<<<<<<<<<<<
*
* if not PyDataType_HASFIELDS(child):
*/
__pyx_t_8 = 0;
(__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":877
* offset[0] += child.itemsize
*
* if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
* t = child.type_num
* if end - f < 5:
*/
__pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0);
if (__pyx_t_6) {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":878
*
* if not PyDataType_HASFIELDS(child):
* t = child.type_num # <<<<<<<<<<<<<<
* if end - f < 5:
* raise RuntimeError(u"Format string allocated too short.")
*/
__pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 878, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4);
__pyx_t_4 = 0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":879
* if not PyDataType_HASFIELDS(child):
* t = child.type_num
* if end - f < 5: # <<<<<<<<<<<<<<
* raise RuntimeError(u"Format string allocated too short.")
*
*/
__pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0);
if (unlikely(__pyx_t_6)) {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":880
* t = child.type_num
* if end - f < 5:
* raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<<
*
* # Until ticket #99 is fixed, use integers to avoid warnings
*/
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 880, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_Raise(__pyx_t_4, 0, 0, 0);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__PYX_ERR(1, 880, __pyx_L1_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":879
* if not PyDataType_HASFIELDS(child):
* t = child.type_num
* if end - f < 5: # <<<<<<<<<<<<<<
* raise RuntimeError(u"Format string allocated too short.")
*
*/
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":883
*
* # Until ticket #99 is fixed, use integers to avoid warnings
* if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<<
* elif t == NPY_UBYTE: f[0] = 66 #"B"
* elif t == NPY_SHORT: f[0] = 104 #"h"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 883, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 883, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 883, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 98;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":884
* # Until ticket #99 is fixed, use integers to avoid warnings
* if t == NPY_BYTE: f[0] = 98 #"b"
* elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<<
* elif t == NPY_SHORT: f[0] = 104 #"h"
* elif t == NPY_USHORT: f[0] = 72 #"H"
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 884, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 884, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 884, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 66;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":885
* if t == NPY_BYTE: f[0] = 98 #"b"
* elif t == NPY_UBYTE: f[0] = 66 #"B"
* elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<<
* elif t == NPY_USHORT: f[0] = 72 #"H"
* elif t == NPY_INT: f[0] = 105 #"i"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 885, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 885, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 885, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x68;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":886
* elif t == NPY_UBYTE: f[0] = 66 #"B"
* elif t == NPY_SHORT: f[0] = 104 #"h"
* elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<<
* elif t == NPY_INT: f[0] = 105 #"i"
* elif t == NPY_UINT: f[0] = 73 #"I"
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 886, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 886, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 886, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 72;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":887
* elif t == NPY_SHORT: f[0] = 104 #"h"
* elif t == NPY_USHORT: f[0] = 72 #"H"
* elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<<
* elif t == NPY_UINT: f[0] = 73 #"I"
* elif t == NPY_LONG: f[0] = 108 #"l"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 887, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 887, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 887, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x69;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":888
* elif t == NPY_USHORT: f[0] = 72 #"H"
* elif t == NPY_INT: f[0] = 105 #"i"
* elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<<
* elif t == NPY_LONG: f[0] = 108 #"l"
* elif t == NPY_ULONG: f[0] = 76 #"L"
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 888, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 888, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 888, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 73;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":889
* elif t == NPY_INT: f[0] = 105 #"i"
* elif t == NPY_UINT: f[0] = 73 #"I"
* elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<<
* elif t == NPY_ULONG: f[0] = 76 #"L"
* elif t == NPY_LONGLONG: f[0] = 113 #"q"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 889, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 889, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 889, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x6C;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":890
* elif t == NPY_UINT: f[0] = 73 #"I"
* elif t == NPY_LONG: f[0] = 108 #"l"
* elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<<
* elif t == NPY_LONGLONG: f[0] = 113 #"q"
* elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 890, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 890, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 890, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 76;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":891
* elif t == NPY_LONG: f[0] = 108 #"l"
* elif t == NPY_ULONG: f[0] = 76 #"L"
* elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<<
* elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
* elif t == NPY_FLOAT: f[0] = 102 #"f"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 891, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 891, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 891, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x71;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":892
* elif t == NPY_ULONG: f[0] = 76 #"L"
* elif t == NPY_LONGLONG: f[0] = 113 #"q"
* elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<<
* elif t == NPY_FLOAT: f[0] = 102 #"f"
* elif t == NPY_DOUBLE: f[0] = 100 #"d"
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 892, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 892, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 892, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 81;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":893
* elif t == NPY_LONGLONG: f[0] = 113 #"q"
* elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
* elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<<
* elif t == NPY_DOUBLE: f[0] = 100 #"d"
* elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 893, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 893, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 893, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x66;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":894
* elif t == NPY_ULONGLONG: f[0] = 81 #"Q"
* elif t == NPY_FLOAT: f[0] = 102 #"f"
* elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<<
* elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
* elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 894, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 894, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 894, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x64;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":895
* elif t == NPY_FLOAT: f[0] = 102 #"f"
* elif t == NPY_DOUBLE: f[0] = 100 #"d"
* elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<<
* elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
* elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 895, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 895, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 895, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 0x67;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":896
* elif t == NPY_DOUBLE: f[0] = 100 #"d"
* elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
* elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<<
* elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
* elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 896, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 896, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 896, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 90;
(__pyx_v_f[1]) = 0x66;
__pyx_v_f = (__pyx_v_f + 1);
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":897
* elif t == NPY_LONGDOUBLE: f[0] = 103 #"g"
* elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
* elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<<
* elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
* elif t == NPY_OBJECT: f[0] = 79 #"O"
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 897, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 897, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 897, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 90;
(__pyx_v_f[1]) = 0x64;
__pyx_v_f = (__pyx_v_f + 1);
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":898
* elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf
* elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
* elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<<
* elif t == NPY_OBJECT: f[0] = 79 #"O"
* else:
*/
__pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 898, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 898, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 898, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (__pyx_t_6) {
(__pyx_v_f[0]) = 90;
(__pyx_v_f[1]) = 0x67;
__pyx_v_f = (__pyx_v_f + 1);
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":899
* elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd
* elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg
* elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<<
* else:
* raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
*/
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 899, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 899, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 899, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (likely(__pyx_t_6)) {
(__pyx_v_f[0]) = 79;
goto __pyx_L15;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":901
* elif t == NPY_OBJECT: f[0] = 79 #"O"
* else:
* raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<<
* f += 1
* else:
*/
/*else*/ {
__pyx_t_3 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 901, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 901, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_Raise(__pyx_t_4, 0, 0, 0);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__PYX_ERR(1, 901, __pyx_L1_error)
}
__pyx_L15:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":902
* else:
* raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t)
* f += 1 # <<<<<<<<<<<<<<
* else:
* # Cython ignores struct boundary information ("T{...}"),
*/
__pyx_v_f = (__pyx_v_f + 1);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":877
* offset[0] += child.itemsize
*
* if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<<
* t = child.type_num
* if end - f < 5:
*/
goto __pyx_L13;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":906
* # Cython ignores struct boundary information ("T{...}"),
* # so don't output it
* f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<<
* return f
*
*/
/*else*/ {
__pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 906, __pyx_L1_error)
__pyx_v_f = __pyx_t_9;
}
__pyx_L13:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":851
* cdef tuple fields
*
* for childname in descr.names: # <<<<<<<<<<<<<<
* fields = descr.fields[childname]
* child, new_offset = fields
*/
}
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":907
* # so don't output it
* f = _util_dtypestring(child, f, end, offset)
* return f # <<<<<<<<<<<<<<
*
*
*/
__pyx_r = __pyx_v_f;
goto __pyx_L0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":842
* return ()
*
* cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<<
* # Recursive utility function used in __getbuffer__ to get format
* # string. The new location in the format string is returned.
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_child);
__Pyx_XDECREF(__pyx_v_fields);
__Pyx_XDECREF(__pyx_v_childname);
__Pyx_XDECREF(__pyx_v_new_offset);
__Pyx_XDECREF(__pyx_v_t);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1022
* int _import_umath() except -1
*
* cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
* Py_INCREF(base) # important to do this before stealing the reference below!
* PyArray_SetBaseObject(arr, base)
*/
/* numpy.set_array_base (Cython inline helper).
 * Installs `base` as the object that owns the memory of ndarray `arr`.
 * A new reference to `base` is taken first because PyArray_SetBaseObject
 * steals a reference to its argument; the helper's net effect is +1 ref
 * transferred into the array. The int return of PyArray_SetBaseObject is
 * deliberately discarded via the (void) cast.
 * NOTE: generated by Cython from numpy/__init__.pxd — do not hand-edit logic. */
static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("set_array_base", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1023
 *
 * cdef inline void set_array_base(ndarray arr, object base):
 * Py_INCREF(base) # important to do this before stealing the reference below! # <<<<<<<<<<<<<<
 * PyArray_SetBaseObject(arr, base)
 *
 */
/* take the reference that PyArray_SetBaseObject will steal below */
Py_INCREF(__pyx_v_base);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1024
 * cdef inline void set_array_base(ndarray arr, object base):
 * Py_INCREF(base) # important to do this before stealing the reference below!
 * PyArray_SetBaseObject(arr, base) # <<<<<<<<<<<<<<
 *
 * cdef inline object get_array_base(ndarray arr):
 */
(void)(PyArray_SetBaseObject(__pyx_v_arr, __pyx_v_base));
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1022
 * int _import_umath() except -1
 *
 * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<<
 * Py_INCREF(base) # important to do this before stealing the reference below!
 * PyArray_SetBaseObject(arr, base)
 */
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1026
* PyArray_SetBaseObject(arr, base)
*
* cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<<
* base = PyArray_BASE(arr)
* if base is NULL:
*/
/* numpy.get_array_base (Cython inline helper).
 * Returns the object that owns the memory of ndarray `arr`, or Py_None
 * when the array owns its own data (PyArray_BASE returns NULL — a
 * borrowed reference otherwise). The helper always returns a NEW
 * reference: Py_None is INCREF'd, and the borrowed base is INCREF'd
 * before being returned.
 * NOTE: generated by Cython from numpy/__init__.pxd — do not hand-edit logic. */
static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) {
PyObject *__pyx_v_base;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
__Pyx_RefNannySetupContext("get_array_base", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1027
 *
 * cdef inline object get_array_base(ndarray arr):
 * base = PyArray_BASE(arr) # <<<<<<<<<<<<<<
 * if base is NULL:
 * return None
 */
/* PyArray_BASE yields a borrowed reference (or NULL if arr owns its data) */
__pyx_v_base = PyArray_BASE(__pyx_v_arr);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1028
 * cdef inline object get_array_base(ndarray arr):
 * base = PyArray_BASE(arr)
 * if base is NULL: # <<<<<<<<<<<<<<
 * return None
 * return <object>base
 */
__pyx_t_1 = ((__pyx_v_base == NULL) != 0);
if (__pyx_t_1) {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1029
 * base = PyArray_BASE(arr)
 * if base is NULL:
 * return None # <<<<<<<<<<<<<<
 * return <object>base
 *
 */
__Pyx_XDECREF(__pyx_r);
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1028
 * cdef inline object get_array_base(ndarray arr):
 * base = PyArray_BASE(arr)
 * if base is NULL: # <<<<<<<<<<<<<<
 * return None
 * return <object>base
 */
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1030
 * if base is NULL:
 * return None
 * return <object>base # <<<<<<<<<<<<<<
 *
 * # Versions of the import_* functions which are more suitable for
 */
__Pyx_XDECREF(__pyx_r);
/* promote the borrowed base to an owned reference before returning it */
__Pyx_INCREF(((PyObject *)__pyx_v_base));
__pyx_r = ((PyObject *)__pyx_v_base);
goto __pyx_L0;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1026
 * PyArray_SetBaseObject(arr, base)
 *
 * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<<
 * base = PyArray_BASE(arr)
 * if base is NULL:
 */
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1034
* # Versions of the import_* functions which are more suitable for
* # Cython code.
* cdef inline int import_array() except -1: # <<<<<<<<<<<<<<
* try:
* _import_array()
*/
/* numpy.import_array (Cython inline helper).
 * Calls the NumPy C-API initializer _import_array(); on any Python
 * exception it re-raises as ImportError("numpy.core.multiarray failed
 * to import"). Returns 0 on success, -1 with a Python exception set on
 * failure (the `except -1` convention of the cdef declaration).
 * The try/except machinery below (__Pyx_ExceptionSave/Reset) preserves
 * any exception state that was live before the call.
 * NOTE: generated by Cython from numpy/__init__.pxd — do not hand-edit logic. */
static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
__Pyx_RefNannySetupContext("import_array", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1035
 * # Cython code.
 * cdef inline int import_array() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_array()
 * except Exception:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* save the current exception state so it can be restored on the error path */
__Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_1);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
/*try:*/ {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1036
 * cdef inline int import_array() except -1:
 * try:
 * _import_array() # <<<<<<<<<<<<<<
 * except Exception:
 * raise ImportError("numpy.core.multiarray failed to import")
 */
__pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1036, __pyx_L3_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1035
 * # Cython code.
 * cdef inline int import_array() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_array()
 * except Exception:
 */
}
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
goto __pyx_L8_try_end;
__pyx_L3_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1037
 * try:
 * _import_array()
 * except Exception: # <<<<<<<<<<<<<<
 * raise ImportError("numpy.core.multiarray failed to import")
 *
 */
/* matches `except Exception:` — any Exception becomes an ImportError */
__pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));
if (__pyx_t_4) {
__Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1037, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_7);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1038
 * _import_array()
 * except Exception:
 * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<<
 *
 * cdef inline int import_umath() except -1:
 */
__pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1038, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_8);
__Pyx_Raise(__pyx_t_8, 0, 0, 0);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__PYX_ERR(1, 1038, __pyx_L5_except_error)
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1035
 * # Cython code.
 * cdef inline int import_array() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_array()
 * except Exception:
 */
/* restore the pre-existing exception state saved above, then fail */
__Pyx_XGIVEREF(__pyx_t_1);
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);
goto __pyx_L1_error;
__pyx_L8_try_end:;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1034
 * # Versions of the import_* functions which are more suitable for
 * # Cython code.
 * cdef inline int import_array() except -1: # <<<<<<<<<<<<<<
 * try:
 * _import_array()
 */
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1040
* raise ImportError("numpy.core.multiarray failed to import")
*
* cdef inline int import_umath() except -1: # <<<<<<<<<<<<<<
* try:
* _import_umath()
*/
/* numpy.import_umath (Cython inline helper).
 * Same shape as import_array above, but initializes the ufunc machinery
 * via _import_umath(); failures are re-raised as
 * ImportError("numpy.core.umath failed to import") (message object
 * __pyx_tuple__8). Returns 0 on success, -1 with an exception set on
 * failure.
 * NOTE: generated by Cython from numpy/__init__.pxd — do not hand-edit logic. */
static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
__Pyx_RefNannySetupContext("import_umath", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1041
 *
 * cdef inline int import_umath() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* save the current exception state so it can be restored on the error path */
__Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_1);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
/*try:*/ {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1042
 * cdef inline int import_umath() except -1:
 * try:
 * _import_umath() # <<<<<<<<<<<<<<
 * except Exception:
 * raise ImportError("numpy.core.umath failed to import")
 */
__pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1042, __pyx_L3_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1041
 *
 * cdef inline int import_umath() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
}
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
goto __pyx_L8_try_end;
__pyx_L3_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1043
 * try:
 * _import_umath()
 * except Exception: # <<<<<<<<<<<<<<
 * raise ImportError("numpy.core.umath failed to import")
 *
 */
/* matches `except Exception:` — any Exception becomes an ImportError */
__pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));
if (__pyx_t_4) {
__Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1043, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_7);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1044
 * _import_umath()
 * except Exception:
 * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<<
 *
 * cdef inline int import_ufunc() except -1:
 */
__pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1044, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_8);
__Pyx_Raise(__pyx_t_8, 0, 0, 0);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__PYX_ERR(1, 1044, __pyx_L5_except_error)
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1041
 *
 * cdef inline int import_umath() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
/* restore the pre-existing exception state saved above, then fail */
__Pyx_XGIVEREF(__pyx_t_1);
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);
goto __pyx_L1_error;
__pyx_L8_try_end:;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1040
 * raise ImportError("numpy.core.multiarray failed to import")
 *
 * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<<
 * try:
 * _import_umath()
 */
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1046
* raise ImportError("numpy.core.umath failed to import")
*
* cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<<
* try:
* _import_umath()
*/
/* numpy.import_ufunc (Cython inline helper).
 * Identical in effect to import_umath above (it also calls
 * _import_umath() and raises the same ImportError, sharing message
 * object __pyx_tuple__8); kept as a separate symbol to mirror the
 * declarations in numpy/__init__.pxd. Returns 0 on success, -1 with an
 * exception set on failure.
 * NOTE: generated by Cython from numpy/__init__.pxd — do not hand-edit logic. */
static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
__Pyx_RefNannySetupContext("import_ufunc", 0);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1047
 *
 * cdef inline int import_ufunc() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* save the current exception state so it can be restored on the error path */
__Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_1);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
/*try:*/ {
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1048
 * cdef inline int import_ufunc() except -1:
 * try:
 * _import_umath() # <<<<<<<<<<<<<<
 * except Exception:
 * raise ImportError("numpy.core.umath failed to import")
 */
__pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1048, __pyx_L3_error)
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1047
 *
 * cdef inline int import_ufunc() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
}
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
goto __pyx_L8_try_end;
__pyx_L3_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1049
 * try:
 * _import_umath()
 * except Exception: # <<<<<<<<<<<<<<
 * raise ImportError("numpy.core.umath failed to import")
 */
/* matches `except Exception:` — any Exception becomes an ImportError */
__pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));
if (__pyx_t_4) {
__Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1049, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_6);
__Pyx_GOTREF(__pyx_t_7);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1050
 * _import_umath()
 * except Exception:
 * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<<
 */
__pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1050, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_8);
__Pyx_Raise(__pyx_t_8, 0, 0, 0);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__PYX_ERR(1, 1050, __pyx_L5_except_error)
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1047
 *
 * cdef inline int import_ufunc() except -1:
 * try: # <<<<<<<<<<<<<<
 * _import_umath()
 * except Exception:
 */
/* restore the pre-existing exception state saved above, then fail */
__Pyx_XGIVEREF(__pyx_t_1);
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);
goto __pyx_L1_error;
__pyx_L8_try_end:;
}
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1046
 * raise ImportError("numpy.core.umath failed to import")
 *
 * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<<
 * try:
 * _import_umath()
 */
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":122
* cdef bint dtype_is_object
*
* def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None, # <<<<<<<<<<<<<<
* mode="c", bint allocate_buffer=True):
*
*/
/* Python wrapper */
/* View.MemoryView.array.__cinit__ — Python-level wrapper.
 * Parses the positional/keyword arguments for
 *   __cinit__(self, tuple shape, Py_ssize_t itemsize, format not None,
 *             mode="c", bint allocate_buffer=True)
 * validates shape is a tuple and format is not None, then delegates to
 * the C-level implementation function. Returns 0 on success, -1 with an
 * exception set on failure (tp_new/tp_init convention).
 * NOTE: generated by Cython from View.MemoryView — do not hand-edit logic. */
/* Python wrapper */
static int __pyx_array___cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_array___cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_shape = 0;
Py_ssize_t __pyx_v_itemsize;
PyObject *__pyx_v_format = 0;
PyObject *__pyx_v_mode = 0;
int __pyx_v_allocate_buffer;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_shape,&__pyx_n_s_itemsize,&__pyx_n_s_format,&__pyx_n_s_mode,&__pyx_n_s_allocate_buffer,0};
PyObject* values[5] = {0,0,0,0,0};
/* values[3] pre-seeds the default for mode ("c"); allocate_buffer's
 * default is applied later when values[4] is still NULL */
values[3] = ((PyObject *)__pyx_n_s_c);
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
/* first collect positionals (fallthrough fills lower slots too) */
switch (pos_args) {
case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
CYTHON_FALLTHROUGH;
case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
CYTHON_FALLTHROUGH;
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
CYTHON_FALLTHROUGH;
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
/* then fill remaining slots from keywords; shape/itemsize/format are
 * required, mode/allocate_buffer optional */
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_shape)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
CYTHON_FALLTHROUGH;
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_itemsize)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 0, 3, 5, 1); __PYX_ERR(2, 122, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 2:
if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_format)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 0, 3, 5, 2); __PYX_ERR(2, 122, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 3:
if (kw_args > 0) {
PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_mode);
if (value) { values[3] = value; kw_args--; }
}
CYTHON_FALLTHROUGH;
case 4:
if (kw_args > 0) {
PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_allocate_buffer);
if (value) { values[4] = value; kw_args--; }
}
}
/* any keyword left over is unknown/duplicate — reject it */
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(2, 122, __pyx_L3_error)
}
} else {
/* no keywords: 3, 4 or 5 positionals accepted */
switch (PyTuple_GET_SIZE(__pyx_args)) {
case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
CYTHON_FALLTHROUGH;
case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
CYTHON_FALLTHROUGH;
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
break;
default: goto __pyx_L5_argtuple_error;
}
}
__pyx_v_shape = ((PyObject*)values[0]);
__pyx_v_itemsize = __Pyx_PyIndex_AsSsize_t(values[1]); if (unlikely((__pyx_v_itemsize == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 122, __pyx_L3_error)
__pyx_v_format = values[2];
__pyx_v_mode = values[3];
if (values[4]) {
__pyx_v_allocate_buffer = __Pyx_PyObject_IsTrue(values[4]); if (unlikely((__pyx_v_allocate_buffer == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 123, __pyx_L3_error)
} else {
/* "View.MemoryView":123
 *
 * def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None,
 * mode="c", bint allocate_buffer=True): # <<<<<<<<<<<<<<
 *
 * cdef int idx
 */
__pyx_v_allocate_buffer = ((int)1);
}
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__cinit__", 0, 3, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 122, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("View.MemoryView.array.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
/* enforce the declared types: shape must be a tuple, format must not be None */
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_shape), (&PyTuple_Type), 1, "shape", 1))) __PYX_ERR(2, 122, __pyx_L1_error)
if (unlikely(((PyObject *)__pyx_v_format) == Py_None)) {
PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "format"); __PYX_ERR(2, 122, __pyx_L1_error)
}
/* hand the unpacked arguments to the C-level implementation */
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array___cinit__(((struct __pyx_array_obj *)__pyx_v_self), __pyx_v_shape, __pyx_v_itemsize, __pyx_v_format, __pyx_v_mode, __pyx_v_allocate_buffer);
/* "View.MemoryView":122
 * cdef bint dtype_is_object
 *
 * def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None, # <<<<<<<<<<<<<<
 * mode="c", bint allocate_buffer=True):
 *
 */
/* function exit code */
goto __pyx_L0;
__pyx_L1_error:;
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array___cinit__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_shape, Py_ssize_t __pyx_v_itemsize, PyObject *__pyx_v_format, PyObject *__pyx_v_mode, int __pyx_v_allocate_buffer) {
int __pyx_v_idx;
Py_ssize_t __pyx_v_i;
Py_ssize_t __pyx_v_dim;
PyObject **__pyx_v_p;
char __pyx_v_order;
int __pyx_r;
__Pyx_RefNannyDeclarations
Py_ssize_t __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
char *__pyx_t_7;
int __pyx_t_8;
Py_ssize_t __pyx_t_9;
PyObject *__pyx_t_10 = NULL;
Py_ssize_t __pyx_t_11;
__Pyx_RefNannySetupContext("__cinit__", 0);
__Pyx_INCREF(__pyx_v_format);
/* "View.MemoryView":129
* cdef PyObject **p
*
* self.ndim = <int> len(shape) # <<<<<<<<<<<<<<
* self.itemsize = itemsize
*
*/
if (unlikely(__pyx_v_shape == Py_None)) {
PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()");
__PYX_ERR(2, 129, __pyx_L1_error)
}
__pyx_t_1 = PyTuple_GET_SIZE(__pyx_v_shape); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(2, 129, __pyx_L1_error)
__pyx_v_self->ndim = ((int)__pyx_t_1);
/* "View.MemoryView":130
*
* self.ndim = <int> len(shape)
* self.itemsize = itemsize # <<<<<<<<<<<<<<
*
* if not self.ndim:
*/
__pyx_v_self->itemsize = __pyx_v_itemsize;
/* "View.MemoryView":132
* self.itemsize = itemsize
*
* if not self.ndim: # <<<<<<<<<<<<<<
* raise ValueError("Empty shape tuple for cython.array")
*
*/
__pyx_t_2 = ((!(__pyx_v_self->ndim != 0)) != 0);
if (unlikely(__pyx_t_2)) {
/* "View.MemoryView":133
*
* if not self.ndim:
* raise ValueError("Empty shape tuple for cython.array") # <<<<<<<<<<<<<<
*
* if itemsize <= 0:
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 133, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 133, __pyx_L1_error)
/* "View.MemoryView":132
* self.itemsize = itemsize
*
* if not self.ndim: # <<<<<<<<<<<<<<
* raise ValueError("Empty shape tuple for cython.array")
*
*/
}
/* "View.MemoryView":135
* raise ValueError("Empty shape tuple for cython.array")
*
* if itemsize <= 0: # <<<<<<<<<<<<<<
* raise ValueError("itemsize <= 0 for cython.array")
*
*/
__pyx_t_2 = ((__pyx_v_itemsize <= 0) != 0);
if (unlikely(__pyx_t_2)) {
/* "View.MemoryView":136
*
* if itemsize <= 0:
* raise ValueError("itemsize <= 0 for cython.array") # <<<<<<<<<<<<<<
*
* if not isinstance(format, bytes):
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 136, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 136, __pyx_L1_error)
/* "View.MemoryView":135
* raise ValueError("Empty shape tuple for cython.array")
*
* if itemsize <= 0: # <<<<<<<<<<<<<<
* raise ValueError("itemsize <= 0 for cython.array")
*
*/
}
/* "View.MemoryView":138
* raise ValueError("itemsize <= 0 for cython.array")
*
* if not isinstance(format, bytes): # <<<<<<<<<<<<<<
* format = format.encode('ASCII')
* self._format = format # keep a reference to the byte string
*/
__pyx_t_2 = PyBytes_Check(__pyx_v_format);
__pyx_t_4 = ((!(__pyx_t_2 != 0)) != 0);
if (__pyx_t_4) {
/* "View.MemoryView":139
*
* if not isinstance(format, bytes):
* format = format.encode('ASCII') # <<<<<<<<<<<<<<
* self._format = format # keep a reference to the byte string
* self.format = self._format
*/
__pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_format, __pyx_n_s_encode); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 139, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
__pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5);
if (likely(__pyx_t_6)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
__Pyx_INCREF(__pyx_t_6);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_5, function);
}
}
__pyx_t_3 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_n_s_ASCII) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_n_s_ASCII);
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 139, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF_SET(__pyx_v_format, __pyx_t_3);
__pyx_t_3 = 0;
/* "View.MemoryView":138
* raise ValueError("itemsize <= 0 for cython.array")
*
* if not isinstance(format, bytes): # <<<<<<<<<<<<<<
* format = format.encode('ASCII')
* self._format = format # keep a reference to the byte string
*/
}
/* "View.MemoryView":140
* if not isinstance(format, bytes):
* format = format.encode('ASCII')
* self._format = format # keep a reference to the byte string # <<<<<<<<<<<<<<
* self.format = self._format
*
*/
if (!(likely(PyBytes_CheckExact(__pyx_v_format))||((__pyx_v_format) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytes", Py_TYPE(__pyx_v_format)->tp_name), 0))) __PYX_ERR(2, 140, __pyx_L1_error)
__pyx_t_3 = __pyx_v_format;
__Pyx_INCREF(__pyx_t_3);
__Pyx_GIVEREF(__pyx_t_3);
__Pyx_GOTREF(__pyx_v_self->_format);
__Pyx_DECREF(__pyx_v_self->_format);
__pyx_v_self->_format = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
/* "View.MemoryView":141
* format = format.encode('ASCII')
* self._format = format # keep a reference to the byte string
* self.format = self._format # <<<<<<<<<<<<<<
*
*
*/
if (unlikely(__pyx_v_self->_format == Py_None)) {
PyErr_SetString(PyExc_TypeError, "expected bytes, NoneType found");
__PYX_ERR(2, 141, __pyx_L1_error)
}
__pyx_t_7 = __Pyx_PyBytes_AsWritableString(__pyx_v_self->_format); if (unlikely((!__pyx_t_7) && PyErr_Occurred())) __PYX_ERR(2, 141, __pyx_L1_error)
__pyx_v_self->format = __pyx_t_7;
/* "View.MemoryView":144
*
*
* self._shape = <Py_ssize_t *> PyObject_Malloc(sizeof(Py_ssize_t)*self.ndim*2) # <<<<<<<<<<<<<<
* self._strides = self._shape + self.ndim
*
*/
__pyx_v_self->_shape = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * __pyx_v_self->ndim) * 2)));
/* "View.MemoryView":145
*
* self._shape = <Py_ssize_t *> PyObject_Malloc(sizeof(Py_ssize_t)*self.ndim*2)
* self._strides = self._shape + self.ndim # <<<<<<<<<<<<<<
*
* if not self._shape:
*/
__pyx_v_self->_strides = (__pyx_v_self->_shape + __pyx_v_self->ndim);
/* "View.MemoryView":147
* self._strides = self._shape + self.ndim
*
* if not self._shape: # <<<<<<<<<<<<<<
* raise MemoryError("unable to allocate shape and strides.")
*
*/
__pyx_t_4 = ((!(__pyx_v_self->_shape != 0)) != 0);
if (unlikely(__pyx_t_4)) {
/* "View.MemoryView":148
*
* if not self._shape:
* raise MemoryError("unable to allocate shape and strides.") # <<<<<<<<<<<<<<
*
*
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_MemoryError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 148, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 148, __pyx_L1_error)
/* "View.MemoryView":147
* self._strides = self._shape + self.ndim
*
* if not self._shape: # <<<<<<<<<<<<<<
* raise MemoryError("unable to allocate shape and strides.")
*
*/
}
/* "View.MemoryView":151
*
*
* for idx, dim in enumerate(shape): # <<<<<<<<<<<<<<
* if dim <= 0:
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
*/
__pyx_t_8 = 0;
__pyx_t_3 = __pyx_v_shape; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0;
for (;;) {
if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(2, 151, __pyx_L1_error)
#else
__pyx_t_5 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 151, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
#endif
__pyx_t_9 = __Pyx_PyIndex_AsSsize_t(__pyx_t_5); if (unlikely((__pyx_t_9 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 151, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_v_dim = __pyx_t_9;
__pyx_v_idx = __pyx_t_8;
__pyx_t_8 = (__pyx_t_8 + 1);
/* "View.MemoryView":152
*
* for idx, dim in enumerate(shape):
* if dim <= 0: # <<<<<<<<<<<<<<
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
* self._shape[idx] = dim
*/
__pyx_t_4 = ((__pyx_v_dim <= 0) != 0);
if (unlikely(__pyx_t_4)) {
/* "View.MemoryView":153
* for idx, dim in enumerate(shape):
* if dim <= 0:
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim)) # <<<<<<<<<<<<<<
* self._shape[idx] = dim
*
*/
__pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_idx); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 153, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = PyInt_FromSsize_t(__pyx_v_dim); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 153, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__pyx_t_10 = PyTuple_New(2); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 153, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_10);
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_5);
__Pyx_GIVEREF(__pyx_t_6);
PyTuple_SET_ITEM(__pyx_t_10, 1, __pyx_t_6);
__pyx_t_5 = 0;
__pyx_t_6 = 0;
__pyx_t_6 = __Pyx_PyString_Format(__pyx_kp_s_Invalid_shape_in_axis_d_d, __pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 153, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
__pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_6); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 153, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_10);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_Raise(__pyx_t_10, 0, 0, 0);
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
__PYX_ERR(2, 153, __pyx_L1_error)
/* "View.MemoryView":152
*
* for idx, dim in enumerate(shape):
* if dim <= 0: # <<<<<<<<<<<<<<
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
* self._shape[idx] = dim
*/
}
/* "View.MemoryView":154
* if dim <= 0:
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
* self._shape[idx] = dim # <<<<<<<<<<<<<<
*
* cdef char order
*/
(__pyx_v_self->_shape[__pyx_v_idx]) = __pyx_v_dim;
/* "View.MemoryView":151
*
*
* for idx, dim in enumerate(shape): # <<<<<<<<<<<<<<
* if dim <= 0:
* raise ValueError("Invalid shape in axis %d: %d." % (idx, dim))
*/
}
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
/* "View.MemoryView":157
*
* cdef char order
* if mode == 'fortran': # <<<<<<<<<<<<<<
* order = b'F'
* self.mode = u'fortran'
*/
__pyx_t_4 = (__Pyx_PyString_Equals(__pyx_v_mode, __pyx_n_s_fortran, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(2, 157, __pyx_L1_error)
if (__pyx_t_4) {
/* "View.MemoryView":158
* cdef char order
* if mode == 'fortran':
* order = b'F' # <<<<<<<<<<<<<<
* self.mode = u'fortran'
* elif mode == 'c':
*/
__pyx_v_order = 'F';
/* "View.MemoryView":159
* if mode == 'fortran':
* order = b'F'
* self.mode = u'fortran' # <<<<<<<<<<<<<<
* elif mode == 'c':
* order = b'C'
*/
__Pyx_INCREF(__pyx_n_u_fortran);
__Pyx_GIVEREF(__pyx_n_u_fortran);
__Pyx_GOTREF(__pyx_v_self->mode);
__Pyx_DECREF(__pyx_v_self->mode);
__pyx_v_self->mode = __pyx_n_u_fortran;
/* "View.MemoryView":157
*
* cdef char order
* if mode == 'fortran': # <<<<<<<<<<<<<<
* order = b'F'
* self.mode = u'fortran'
*/
goto __pyx_L10;
}
/* "View.MemoryView":160
* order = b'F'
* self.mode = u'fortran'
* elif mode == 'c': # <<<<<<<<<<<<<<
* order = b'C'
* self.mode = u'c'
*/
__pyx_t_4 = (__Pyx_PyString_Equals(__pyx_v_mode, __pyx_n_s_c, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(2, 160, __pyx_L1_error)
if (likely(__pyx_t_4)) {
/* "View.MemoryView":161
* self.mode = u'fortran'
* elif mode == 'c':
* order = b'C' # <<<<<<<<<<<<<<
* self.mode = u'c'
* else:
*/
__pyx_v_order = 'C';
/* "View.MemoryView":162
* elif mode == 'c':
* order = b'C'
* self.mode = u'c' # <<<<<<<<<<<<<<
* else:
* raise ValueError("Invalid mode, expected 'c' or 'fortran', got %s" % mode)
*/
__Pyx_INCREF(__pyx_n_u_c);
__Pyx_GIVEREF(__pyx_n_u_c);
__Pyx_GOTREF(__pyx_v_self->mode);
__Pyx_DECREF(__pyx_v_self->mode);
__pyx_v_self->mode = __pyx_n_u_c;
/* "View.MemoryView":160
* order = b'F'
* self.mode = u'fortran'
* elif mode == 'c': # <<<<<<<<<<<<<<
* order = b'C'
* self.mode = u'c'
*/
goto __pyx_L10;
}
/* "View.MemoryView":164
* self.mode = u'c'
* else:
* raise ValueError("Invalid mode, expected 'c' or 'fortran', got %s" % mode) # <<<<<<<<<<<<<<
*
* self.len = fill_contig_strides_array(self._shape, self._strides,
*/
/*else*/ {
__pyx_t_3 = __Pyx_PyString_FormatSafe(__pyx_kp_s_Invalid_mode_expected_c_or_fortr, __pyx_v_mode); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 164, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 164, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_10);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_Raise(__pyx_t_10, 0, 0, 0);
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
__PYX_ERR(2, 164, __pyx_L1_error)
}
__pyx_L10:;
/* "View.MemoryView":166
* raise ValueError("Invalid mode, expected 'c' or 'fortran', got %s" % mode)
*
* self.len = fill_contig_strides_array(self._shape, self._strides, # <<<<<<<<<<<<<<
* itemsize, self.ndim, order)
*
*/
__pyx_v_self->len = __pyx_fill_contig_strides_array(__pyx_v_self->_shape, __pyx_v_self->_strides, __pyx_v_itemsize, __pyx_v_self->ndim, __pyx_v_order);
/* "View.MemoryView":169
* itemsize, self.ndim, order)
*
* self.free_data = allocate_buffer # <<<<<<<<<<<<<<
* self.dtype_is_object = format == b'O'
* if allocate_buffer:
*/
__pyx_v_self->free_data = __pyx_v_allocate_buffer;
/* "View.MemoryView":170
*
* self.free_data = allocate_buffer
* self.dtype_is_object = format == b'O' # <<<<<<<<<<<<<<
* if allocate_buffer:
*
*/
__pyx_t_10 = PyObject_RichCompare(__pyx_v_format, __pyx_n_b_O, Py_EQ); __Pyx_XGOTREF(__pyx_t_10); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 170, __pyx_L1_error)
__pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_10); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 170, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
__pyx_v_self->dtype_is_object = __pyx_t_4;
/* "View.MemoryView":171
* self.free_data = allocate_buffer
* self.dtype_is_object = format == b'O'
* if allocate_buffer: # <<<<<<<<<<<<<<
*
*
*/
__pyx_t_4 = (__pyx_v_allocate_buffer != 0);
if (__pyx_t_4) {
/* "View.MemoryView":174
*
*
* self.data = <char *>malloc(self.len) # <<<<<<<<<<<<<<
* if not self.data:
* raise MemoryError("unable to allocate array data.")
*/
__pyx_v_self->data = ((char *)malloc(__pyx_v_self->len));
/* "View.MemoryView":175
*
* self.data = <char *>malloc(self.len)
* if not self.data: # <<<<<<<<<<<<<<
* raise MemoryError("unable to allocate array data.")
*
*/
__pyx_t_4 = ((!(__pyx_v_self->data != 0)) != 0);
if (unlikely(__pyx_t_4)) {
/* "View.MemoryView":176
* self.data = <char *>malloc(self.len)
* if not self.data:
* raise MemoryError("unable to allocate array data.") # <<<<<<<<<<<<<<
*
* if self.dtype_is_object:
*/
__pyx_t_10 = __Pyx_PyObject_Call(__pyx_builtin_MemoryError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(2, 176, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_10);
__Pyx_Raise(__pyx_t_10, 0, 0, 0);
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
__PYX_ERR(2, 176, __pyx_L1_error)
/* "View.MemoryView":175
*
* self.data = <char *>malloc(self.len)
* if not self.data: # <<<<<<<<<<<<<<
* raise MemoryError("unable to allocate array data.")
*
*/
}
/* "View.MemoryView":178
* raise MemoryError("unable to allocate array data.")
*
* if self.dtype_is_object: # <<<<<<<<<<<<<<
* p = <PyObject **> self.data
* for i in range(self.len / itemsize):
*/
__pyx_t_4 = (__pyx_v_self->dtype_is_object != 0);
if (__pyx_t_4) {
/* "View.MemoryView":179
*
* if self.dtype_is_object:
* p = <PyObject **> self.data # <<<<<<<<<<<<<<
* for i in range(self.len / itemsize):
* p[i] = Py_None
*/
__pyx_v_p = ((PyObject **)__pyx_v_self->data);
/* "View.MemoryView":180
* if self.dtype_is_object:
* p = <PyObject **> self.data
* for i in range(self.len / itemsize): # <<<<<<<<<<<<<<
* p[i] = Py_None
* Py_INCREF(Py_None)
*/
if (unlikely(__pyx_v_itemsize == 0)) {
PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero");
__PYX_ERR(2, 180, __pyx_L1_error)
}
else if (sizeof(Py_ssize_t) == sizeof(long) && (!(((Py_ssize_t)-1) > 0)) && unlikely(__pyx_v_itemsize == (Py_ssize_t)-1) && unlikely(UNARY_NEG_WOULD_OVERFLOW(__pyx_v_self->len))) {
PyErr_SetString(PyExc_OverflowError, "value too large to perform division");
__PYX_ERR(2, 180, __pyx_L1_error)
}
__pyx_t_1 = __Pyx_div_Py_ssize_t(__pyx_v_self->len, __pyx_v_itemsize);
__pyx_t_9 = __pyx_t_1;
for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_9; __pyx_t_11+=1) {
__pyx_v_i = __pyx_t_11;
/* "View.MemoryView":181
* p = <PyObject **> self.data
* for i in range(self.len / itemsize):
* p[i] = Py_None # <<<<<<<<<<<<<<
* Py_INCREF(Py_None)
*
*/
(__pyx_v_p[__pyx_v_i]) = Py_None;
/* "View.MemoryView":182
* for i in range(self.len / itemsize):
* p[i] = Py_None
* Py_INCREF(Py_None) # <<<<<<<<<<<<<<
*
* @cname('getbuffer')
*/
Py_INCREF(Py_None);
}
/* "View.MemoryView":178
* raise MemoryError("unable to allocate array data.")
*
* if self.dtype_is_object: # <<<<<<<<<<<<<<
* p = <PyObject **> self.data
* for i in range(self.len / itemsize):
*/
}
/* "View.MemoryView":171
* self.free_data = allocate_buffer
* self.dtype_is_object = format == b'O'
* if allocate_buffer: # <<<<<<<<<<<<<<
*
*
*/
}
/* "View.MemoryView":122
* cdef bint dtype_is_object
*
* def __cinit__(array self, tuple shape, Py_ssize_t itemsize, format not None, # <<<<<<<<<<<<<<
* mode="c", bint allocate_buffer=True):
*
*/
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_10);
__Pyx_AddTraceback("View.MemoryView.array.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_format);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":185
*
* @cname('getbuffer')
* def __getbuffer__(self, Py_buffer *info, int flags): # <<<<<<<<<<<<<<
* cdef int bufmode = -1
* if self.mode == u"c":
*/
/* Python wrapper */
/* Cython-generated tp_as_buffer->bf_getbuffer slot wrapper for
 * View.MemoryView.array.__getbuffer__: casts the generic PyObject* self to
 * the concrete array struct and forwards to the typed implementation. */
static CYTHON_UNUSED int __pyx_array_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
static CYTHON_UNUSED int __pyx_array_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0);
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array_2__getbuffer__(((struct __pyx_array_obj *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of View.MemoryView.array.__getbuffer__ (buffer protocol,
 * PEP 3118). Validates that the requested contiguity `flags` are compatible
 * with the array's mode ('c' or 'fortran'), then fills the Py_buffer `info`
 * struct from the array's own fields (data pointer, len, ndim, shape,
 * strides, itemsize) and sets info->obj to self. Returns 0 on success,
 * -1 with an exception set on failure. Generated by Cython; the interleaved
 * View.MemoryView comments show the original source lines. */
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array_2__getbuffer__(struct __pyx_array_obj *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
int __pyx_v_bufmode;
int __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
char *__pyx_t_4;
Py_ssize_t __pyx_t_5;
int __pyx_t_6;
Py_ssize_t *__pyx_t_7;
/* NOTE: a NULL Py_buffer* is rejected up front per the CPython buffer API. */
if (__pyx_v_info == NULL) {
PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete");
return -1;
}
__Pyx_RefNannySetupContext("__getbuffer__", 0);
/* info->obj starts as Py_None so the error path can detect and clear it. */
__pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);
__Pyx_GIVEREF(__pyx_v_info->obj);
/* "View.MemoryView":186
 * @cname('getbuffer')
 * def __getbuffer__(self, Py_buffer *info, int flags):
 *     cdef int bufmode = -1             # <<<<<<<<<<<<<<
 *     if self.mode == u"c":
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 */
__pyx_v_bufmode = -1;
/* "View.MemoryView":187
 * def __getbuffer__(self, Py_buffer *info, int flags):
 *     cdef int bufmode = -1
 *     if self.mode == u"c":             # <<<<<<<<<<<<<<
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     elif self.mode == u"fortran":
 */
__pyx_t_1 = (__Pyx_PyUnicode_Equals(__pyx_v_self->mode, __pyx_n_u_c, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 187, __pyx_L1_error)
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* "View.MemoryView":188
 *     cdef int bufmode = -1
 *     if self.mode == u"c":
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS             # <<<<<<<<<<<<<<
 *     elif self.mode == u"fortran":
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 */
__pyx_v_bufmode = (PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS);
/* "View.MemoryView":187
 * def __getbuffer__(self, Py_buffer *info, int flags):
 *     cdef int bufmode = -1
 *     if self.mode == u"c":             # <<<<<<<<<<<<<<
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     elif self.mode == u"fortran":
 */
goto __pyx_L3;
}
/* "View.MemoryView":189
 *     if self.mode == u"c":
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     elif self.mode == u"fortran":             # <<<<<<<<<<<<<<
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     if not (flags & bufmode):
 */
__pyx_t_2 = (__Pyx_PyUnicode_Equals(__pyx_v_self->mode, __pyx_n_u_fortran, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 189, __pyx_L1_error)
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* "View.MemoryView":190
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     elif self.mode == u"fortran":
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS             # <<<<<<<<<<<<<<
 *     if not (flags & bufmode):
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")
 */
__pyx_v_bufmode = (PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS);
/* "View.MemoryView":189
 *     if self.mode == u"c":
 *         bufmode = PyBUF_C_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     elif self.mode == u"fortran":             # <<<<<<<<<<<<<<
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     if not (flags & bufmode):
 */
}
__pyx_L3:;
/* "View.MemoryView":191
 *     elif self.mode == u"fortran":
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     if not (flags & bufmode):             # <<<<<<<<<<<<<<
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")
 *     info.buf = self.data
 */
__pyx_t_1 = ((!((__pyx_v_flags & __pyx_v_bufmode) != 0)) != 0);
if (unlikely(__pyx_t_1)) {
/* "View.MemoryView":192
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     if not (flags & bufmode):
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")             # <<<<<<<<<<<<<<
 *     info.buf = self.data
 *     info.len = self.len
 */
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__13, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 192, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 192, __pyx_L1_error)
/* "View.MemoryView":191
 *     elif self.mode == u"fortran":
 *         bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 *     if not (flags & bufmode):             # <<<<<<<<<<<<<<
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")
 *     info.buf = self.data
 */
}
/* NOTE: from here on the Py_buffer struct is filled field-by-field from
 * the array object; the temporaries mirror the original attribute reads. */
/* "View.MemoryView":193
 *     if not (flags & bufmode):
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")
 *     info.buf = self.data             # <<<<<<<<<<<<<<
 *     info.len = self.len
 *     info.ndim = self.ndim
 */
__pyx_t_4 = __pyx_v_self->data;
__pyx_v_info->buf = __pyx_t_4;
/* "View.MemoryView":194
 *         raise ValueError("Can only create a buffer that is contiguous in memory.")
 *     info.buf = self.data
 *     info.len = self.len             # <<<<<<<<<<<<<<
 *     info.ndim = self.ndim
 *     info.shape = self._shape
 */
__pyx_t_5 = __pyx_v_self->len;
__pyx_v_info->len = __pyx_t_5;
/* "View.MemoryView":195
 *     info.buf = self.data
 *     info.len = self.len
 *     info.ndim = self.ndim             # <<<<<<<<<<<<<<
 *     info.shape = self._shape
 *     info.strides = self._strides
 */
__pyx_t_6 = __pyx_v_self->ndim;
__pyx_v_info->ndim = __pyx_t_6;
/* "View.MemoryView":196
 *     info.len = self.len
 *     info.ndim = self.ndim
 *     info.shape = self._shape             # <<<<<<<<<<<<<<
 *     info.strides = self._strides
 *     info.suboffsets = NULL
 */
__pyx_t_7 = __pyx_v_self->_shape;
__pyx_v_info->shape = __pyx_t_7;
/* "View.MemoryView":197
 *     info.ndim = self.ndim
 *     info.shape = self._shape
 *     info.strides = self._strides             # <<<<<<<<<<<<<<
 *     info.suboffsets = NULL
 *     info.itemsize = self.itemsize
 */
__pyx_t_7 = __pyx_v_self->_strides;
__pyx_v_info->strides = __pyx_t_7;
/* "View.MemoryView":198
 *     info.shape = self._shape
 *     info.strides = self._strides
 *     info.suboffsets = NULL             # <<<<<<<<<<<<<<
 *     info.itemsize = self.itemsize
 *     info.readonly = 0
 */
__pyx_v_info->suboffsets = NULL;
/* "View.MemoryView":199
 *     info.strides = self._strides
 *     info.suboffsets = NULL
 *     info.itemsize = self.itemsize             # <<<<<<<<<<<<<<
 *     info.readonly = 0
 *
 */
__pyx_t_5 = __pyx_v_self->itemsize;
__pyx_v_info->itemsize = __pyx_t_5;
/* "View.MemoryView":200
 *     info.suboffsets = NULL
 *     info.itemsize = self.itemsize
 *     info.readonly = 0             # <<<<<<<<<<<<<<
 *
 *     if flags & PyBUF_FORMAT:
 */
__pyx_v_info->readonly = 0;
/* "View.MemoryView":202
 *     info.readonly = 0
 *
 *     if flags & PyBUF_FORMAT:             # <<<<<<<<<<<<<<
 *         info.format = self.format
 *     else:
 */
__pyx_t_1 = ((__pyx_v_flags & PyBUF_FORMAT) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":203
 *
 *     if flags & PyBUF_FORMAT:
 *         info.format = self.format             # <<<<<<<<<<<<<<
 *     else:
 *         info.format = NULL
 */
__pyx_t_4 = __pyx_v_self->format;
__pyx_v_info->format = __pyx_t_4;
/* "View.MemoryView":202
 *     info.readonly = 0
 *
 *     if flags & PyBUF_FORMAT:             # <<<<<<<<<<<<<<
 *         info.format = self.format
 *     else:
 */
goto __pyx_L5;
}
/* "View.MemoryView":205
 *         info.format = self.format
 *     else:
 *         info.format = NULL             # <<<<<<<<<<<<<<
 *
 *     info.obj = self
 */
/*else*/ {
__pyx_v_info->format = NULL;
}
__pyx_L5:;
/* "View.MemoryView":207
 *         info.format = NULL
 *
 *     info.obj = self             # <<<<<<<<<<<<<<
 *
 * __pyx_getbuffer = capsule(<void *> &__pyx_array_getbuffer, "getbuffer(obj, view, flags)")
 */
/* NOTE: replaces the placeholder Py_None in info->obj with self; the new
 * reference keeps the array alive for the lifetime of the buffer view. */
__Pyx_INCREF(((PyObject *)__pyx_v_self));
__Pyx_GIVEREF(((PyObject *)__pyx_v_self));
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj);
__pyx_v_info->obj = ((PyObject *)__pyx_v_self);
/* "View.MemoryView":185
 *
 * @cname('getbuffer')
 * def __getbuffer__(self, Py_buffer *info, int flags):             # <<<<<<<<<<<<<<
 *     cdef int bufmode = -1
 *     if self.mode == u"c":
 */
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.array.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
/* Error path: drop whatever reference info->obj holds so the caller does
 * not release it again. */
if (__pyx_v_info->obj != NULL) {
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
}
goto __pyx_L2;
__pyx_L0:;
/* Success path: if info->obj is still the initial Py_None placeholder
 * (never replaced), clear it. */
if (__pyx_v_info->obj == Py_None) {
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
}
__pyx_L2:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":211
* __pyx_getbuffer = capsule(<void *> &__pyx_array_getbuffer, "getbuffer(obj, view, flags)")
*
* def __dealloc__(array self): # <<<<<<<<<<<<<<
* if self.callback_free_data != NULL:
* self.callback_free_data(self.data)
*/
/* Python wrapper */
/* tp_dealloc slot wrapper for View.MemoryView.array.__dealloc__: casts the
 * generic PyObject* self and forwards to the typed implementation. */
static void __pyx_array___dealloc__(PyObject *__pyx_v_self); /*proto*/
static void __pyx_array___dealloc__(PyObject *__pyx_v_self) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
__pyx_array___pyx_pf_15View_dot_MemoryView_5array_4__dealloc__(((struct __pyx_array_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* Implementation of View.MemoryView.array.__dealloc__. Releases the data
 * buffer either via the user-supplied callback_free_data, or — when the
 * array owns the buffer (free_data) — by first DECREF-ing any contained
 * PyObject* elements (dtype_is_object) and then free()-ing the buffer.
 * Finally frees the shape/strides block allocated with PyObject_Malloc
 * in __cinit__ (see View.MemoryView:144). */
static void __pyx_array___pyx_pf_15View_dot_MemoryView_5array_4__dealloc__(struct __pyx_array_obj *__pyx_v_self) {
__Pyx_RefNannyDeclarations
int __pyx_t_1;
__Pyx_RefNannySetupContext("__dealloc__", 0);
/* "View.MemoryView":212
 *
 * def __dealloc__(array self):
 *     if self.callback_free_data != NULL:             # <<<<<<<<<<<<<<
 *         self.callback_free_data(self.data)
 *     elif self.free_data:
 */
__pyx_t_1 = ((__pyx_v_self->callback_free_data != NULL) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":213
 * def __dealloc__(array self):
 *     if self.callback_free_data != NULL:
 *         self.callback_free_data(self.data)             # <<<<<<<<<<<<<<
 *     elif self.free_data:
 *         if self.dtype_is_object:
 */
__pyx_v_self->callback_free_data(__pyx_v_self->data);
/* "View.MemoryView":212
 *
 * def __dealloc__(array self):
 *     if self.callback_free_data != NULL:             # <<<<<<<<<<<<<<
 *         self.callback_free_data(self.data)
 *     elif self.free_data:
 */
goto __pyx_L3;
}
/* "View.MemoryView":214
 *     if self.callback_free_data != NULL:
 *         self.callback_free_data(self.data)
 *     elif self.free_data:             # <<<<<<<<<<<<<<
 *         if self.dtype_is_object:
 *             refcount_objects_in_slice(self.data, self._shape,
 */
__pyx_t_1 = (__pyx_v_self->free_data != 0);
if (__pyx_t_1) {
/* "View.MemoryView":215
 *         self.callback_free_data(self.data)
 *     elif self.free_data:
 *         if self.dtype_is_object:             # <<<<<<<<<<<<<<
 *             refcount_objects_in_slice(self.data, self._shape,
 *                                       self._strides, self.ndim, False)
 */
__pyx_t_1 = (__pyx_v_self->dtype_is_object != 0);
if (__pyx_t_1) {
/* "View.MemoryView":216
 *     elif self.free_data:
 *         if self.dtype_is_object:
 *             refcount_objects_in_slice(self.data, self._shape,             # <<<<<<<<<<<<<<
 *                                       self._strides, self.ndim, False)
 *             free(self.data)
 */
/* NOTE: final arg 0 (False) means "decref", dropping the references taken
 * when the buffer was filled with Py_None in __cinit__. */
__pyx_memoryview_refcount_objects_in_slice(__pyx_v_self->data, __pyx_v_self->_shape, __pyx_v_self->_strides, __pyx_v_self->ndim, 0);
/* "View.MemoryView":215
 *         self.callback_free_data(self.data)
 *     elif self.free_data:
 *         if self.dtype_is_object:             # <<<<<<<<<<<<<<
 *             refcount_objects_in_slice(self.data, self._shape,
 *                                       self._strides, self.ndim, False)
 */
}
/* "View.MemoryView":218
 *             refcount_objects_in_slice(self.data, self._shape,
 *                                       self._strides, self.ndim, False)
 *             free(self.data)             # <<<<<<<<<<<<<<
 *         PyObject_Free(self._shape)
 *
 */
free(__pyx_v_self->data);
/* "View.MemoryView":214
 *     if self.callback_free_data != NULL:
 *         self.callback_free_data(self.data)
 *     elif self.free_data:             # <<<<<<<<<<<<<<
 *         if self.dtype_is_object:
 *             refcount_objects_in_slice(self.data, self._shape,
 */
}
__pyx_L3:;
/* "View.MemoryView":219
 *                                       self._strides, self.ndim, False)
 *             free(self.data)
 * PyObject_Free(self._shape)             # <<<<<<<<<<<<<<
 *
 * @property
 */
PyObject_Free(__pyx_v_self->_shape);
/* "View.MemoryView":211
 * __pyx_getbuffer = capsule(<void *> &__pyx_array_getbuffer, "getbuffer(obj, view, flags)")
 *
 * def __dealloc__(array self):             # <<<<<<<<<<<<<<
 *     if self.callback_free_data != NULL:
 *         self.callback_free_data(self.data)
 */
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* "View.MemoryView":222
*
* @property
* def memview(self): # <<<<<<<<<<<<<<
* return self.get_memview()
*
*/
/* Python wrapper */
/* Property-getter slot wrapper for View.MemoryView.array.memview.__get__:
 * casts self to the concrete array struct and forwards to the impl. */
static PyObject *__pyx_pw_15View_dot_MemoryView_5array_7memview_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_5array_7memview_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_5array_7memview___get__(((struct __pyx_array_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of the `memview` property getter: dispatches through the
 * array vtable to get_memview() and returns the resulting memoryview
 * (new reference), or NULL with an exception set on failure. */
static PyObject *__pyx_pf_15View_dot_MemoryView_5array_7memview___get__(struct __pyx_array_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":223
 * @property
 * def memview(self):
 *     return self.get_memview()             # <<<<<<<<<<<<<<
 *
 * @cname('get_memview')
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = ((struct __pyx_vtabstruct_array *)__pyx_v_self->__pyx_vtab)->get_memview(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 223, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "View.MemoryView":222
 *
 * @property
 * def memview(self):             # <<<<<<<<<<<<<<
 *     return self.get_memview()
 *
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.array.memview.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":226
*
* @cname('get_memview')
* cdef get_memview(self): # <<<<<<<<<<<<<<
* flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE
* return memoryview(self, flags, self.dtype_is_object)
*/
/* cdef helper View.MemoryView.array.get_memview(): builds and returns a
 * new memoryview over this array, requesting any-contiguous, formatted,
 * writable buffer access (PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE)
 * and propagating dtype_is_object. Returns a new reference, or NULL with an
 * exception set. */
static PyObject *__pyx_array_get_memview(struct __pyx_array_obj *__pyx_v_self) {
int __pyx_v_flags;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("get_memview", 0);
/* "View.MemoryView":227
 * @cname('get_memview')
 * cdef get_memview(self):
 *     flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE             # <<<<<<<<<<<<<<
 *     return memoryview(self, flags, self.dtype_is_object)
 *
 */
__pyx_v_flags = ((PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT) | PyBUF_WRITABLE);
/* "View.MemoryView":228
 * cdef get_memview(self):
 *     flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE
 *     return memoryview(self, flags, self.dtype_is_object)             # <<<<<<<<<<<<<<
 *
 * def __len__(self):
 */
/* NOTE: boxes the C ints, packs the (self, flags, dtype_is_object) args
 * tuple, and calls the Cython memoryview type's constructor. */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_flags); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 228, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->dtype_is_object); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 228, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 228, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_INCREF(((PyObject *)__pyx_v_self));
__Pyx_GIVEREF(((PyObject *)__pyx_v_self));
PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_v_self));
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
__pyx_t_1 = 0;
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_memoryview_type), __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 228, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":226
 *
 * @cname('get_memview')
 * cdef get_memview(self):             # <<<<<<<<<<<<<<
 *     flags = PyBUF_ANY_CONTIGUOUS|PyBUF_FORMAT|PyBUF_WRITABLE
 *     return memoryview(self, flags, self.dtype_is_object)
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.array.get_memview", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":230
* return memoryview(self, flags, self.dtype_is_object)
*
* def __len__(self): # <<<<<<<<<<<<<<
* return self._shape[0]
*
*/
/* Python wrapper */
/* sq_length/mp_length slot wrapper for View.MemoryView.array.__len__:
 * casts self and forwards to the typed implementation. */
static Py_ssize_t __pyx_array___len__(PyObject *__pyx_v_self); /*proto*/
static Py_ssize_t __pyx_array___len__(PyObject *__pyx_v_self) {
Py_ssize_t __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__len__ (wrapper)", 0);
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array_6__len__(((struct __pyx_array_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of array.__len__: returns the extent of the first axis
 * (self._shape[0]); cannot fail. */
static Py_ssize_t __pyx_array___pyx_pf_15View_dot_MemoryView_5array_6__len__(struct __pyx_array_obj *__pyx_v_self) {
Py_ssize_t __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__len__", 0);
/* "View.MemoryView":231
 *
 * def __len__(self):
 *     return self._shape[0]             # <<<<<<<<<<<<<<
 *
 * def __getattr__(self, attr):
 */
__pyx_r = (__pyx_v_self->_shape[0]);
goto __pyx_L0;
/* "View.MemoryView":230
 *     return memoryview(self, flags, self.dtype_is_object)
 *
 * def __len__(self):             # <<<<<<<<<<<<<<
 *     return self._shape[0]
 *
 */
/* function exit code */
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":233
* return self._shape[0]
*
* def __getattr__(self, attr): # <<<<<<<<<<<<<<
* return getattr(self.memview, attr)
*
*/
/* Python wrapper */
/* tp_getattro fallback slot wrapper for array.__getattr__: casts self and
 * the attribute name and forwards to the typed implementation. */
static PyObject *__pyx_array___getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_attr); /*proto*/
static PyObject *__pyx_array___getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_attr) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__getattr__ (wrapper)", 0);
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array_8__getattr__(((struct __pyx_array_obj *)__pyx_v_self), ((PyObject *)__pyx_v_attr));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of array.__getattr__: delegates unknown attribute lookups
 * to the array's memoryview (getattr(self.memview, attr)). Returns a new
 * reference, or NULL with an exception set. */
static PyObject *__pyx_array___pyx_pf_15View_dot_MemoryView_5array_8__getattr__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_attr) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("__getattr__", 0);
/* "View.MemoryView":234
 *
 * def __getattr__(self, attr):
 *     return getattr(self.memview, attr)             # <<<<<<<<<<<<<<
 *
 * def __getitem__(self, item):
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 234, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_GetAttr(__pyx_t_1, __pyx_v_attr); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 234, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":233
 *     return self._shape[0]
 *
 * def __getattr__(self, attr):             # <<<<<<<<<<<<<<
 *     return getattr(self.memview, attr)
 *
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.array.__getattr__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":236
* return getattr(self.memview, attr)
*
* def __getitem__(self, item): # <<<<<<<<<<<<<<
* return self.memview[item]
*
*/
/* Python wrapper */
/* mp_subscript slot wrapper for array.__getitem__: casts self and the key
 * and forwards to the typed implementation. */
static PyObject *__pyx_array___getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/
static PyObject *__pyx_array___getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0);
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array_10__getitem__(((struct __pyx_array_obj *)__pyx_v_self), ((PyObject *)__pyx_v_item));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of array.__getitem__: delegates subscripting to the
 * array's memoryview (self.memview[item]). Returns a new reference, or
 * NULL with an exception set. */
static PyObject *__pyx_array___pyx_pf_15View_dot_MemoryView_5array_10__getitem__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_item) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("__getitem__", 0);
/* "View.MemoryView":237
 *
 * def __getitem__(self, item):
 *     return self.memview[item]             # <<<<<<<<<<<<<<
 *
 * def __setitem__(self, item, value):
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 237, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_PyObject_GetItem(__pyx_t_1, __pyx_v_item); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 237, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":236
 *     return getattr(self.memview, attr)
 *
 * def __getitem__(self, item):             # <<<<<<<<<<<<<<
 *     return self.memview[item]
 *
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.array.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":239
* return self.memview[item]
*
* def __setitem__(self, item, value): # <<<<<<<<<<<<<<
* self.memview[item] = value
*
*/
/* Python wrapper */
/* mp_ass_subscript slot wrapper for array.__setitem__: casts self, key, and
 * value, then forwards to the typed implementation. */
static int __pyx_array___setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_value); /*proto*/
static int __pyx_array___setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_value) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0);
__pyx_r = __pyx_array___pyx_pf_15View_dot_MemoryView_5array_12__setitem__(((struct __pyx_array_obj *)__pyx_v_self), ((PyObject *)__pyx_v_item), ((PyObject *)__pyx_v_value));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `array.__setitem__`: fetches `self.memview` and performs
 * `self.memview[item] = value` via PyObject_SetItem.  Returns 0 on success; on
 * failure jumps to `__pyx_L1_error`, releases the temporary, records a traceback
 * and returns -1. */
static int __pyx_array___pyx_pf_15View_dot_MemoryView_5array_12__setitem__(struct __pyx_array_obj *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_value) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__setitem__", 0);
/* "View.MemoryView":240
 *
 * def __setitem__(self, item, value):
 * self.memview[item] = value # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 240, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (unlikely(PyObject_SetItem(__pyx_t_1, __pyx_v_item, __pyx_v_value) < 0)) __PYX_ERR(2, 240, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "View.MemoryView":239
 * return self.memview[item]
 *
 * def __setitem__(self, item, value): # <<<<<<<<<<<<<<
 * self.memview[item] = value
 *
 */
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.array.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* Python wrapper */
/* Cython-generated wrapper for `array.__reduce_cython__` (pickling hook).
 * Delegates to the implementation, which always raises TypeError. */
static PyObject *__pyx_pw___pyx_array_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw___pyx_array_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_array___reduce_cython__(((struct __pyx_array_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `array.__reduce_cython__`: unconditionally raises
 * TypeError("no default __reduce__ due to non-trivial __cinit__") using the
 * pre-built argument tuple `__pyx_tuple__14`, making array instances unpicklable.
 * The raise path falls through to `__pyx_L1_error` and returns NULL. */
static PyObject *__pyx_pf___pyx_array___reduce_cython__(CYTHON_UNUSED struct __pyx_array_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 2, __pyx_L1_error)
/* "(tree fragment)":1
 * def __reduce_cython__(self): # <<<<<<<<<<<<<<
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.array.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
/* Cython-generated wrapper for `array.__setstate_cython__` (unpickling hook).
 * Delegates to the implementation, which always raises TypeError. */
static PyObject *__pyx_pw___pyx_array_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw___pyx_array_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_array_2__setstate_cython__(((struct __pyx_array_obj *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `array.__setstate_cython__`: mirrors __reduce_cython__ above —
 * unconditionally raises TypeError via the pre-built tuple `__pyx_tuple__15` and
 * returns NULL through the `__pyx_L1_error` path. */
static PyObject *__pyx_pf___pyx_array_2__setstate_cython__(CYTHON_UNUSED struct __pyx_array_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 4, __pyx_L1_error)
/* "(tree fragment)":3
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.array.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":244
*
* @cname("__pyx_array_new")
* cdef array array_cwrapper(tuple shape, Py_ssize_t itemsize, char *format, # <<<<<<<<<<<<<<
* char *mode, char *buf):
* cdef array result
*/
/* C-level factory for the Cython `array` class (cname __pyx_array_new,
 * View.MemoryView:244).  Builds the constructor argument tuple
 * (shape, itemsize, format, mode.decode('ASCII')) and calls the array type.
 * If `buf` is NULL the array allocates its own buffer; otherwise it is
 * constructed with allocate_buffer=False and `result.data` is pointed at the
 * caller-supplied buffer.  Returns a new reference, or 0 on error via
 * `__pyx_L1_error` (temporaries released, traceback recorded). */
static struct __pyx_array_obj *__pyx_array_new(PyObject *__pyx_v_shape, Py_ssize_t __pyx_v_itemsize, char *__pyx_v_format, char *__pyx_v_mode, char *__pyx_v_buf) {
struct __pyx_array_obj *__pyx_v_result = 0;
struct __pyx_array_obj *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
__Pyx_RefNannySetupContext("array_cwrapper", 0);
/* "View.MemoryView":248
 * cdef array result
 *
 * if buf == NULL: # <<<<<<<<<<<<<<
 * result = array(shape, itemsize, format, mode.decode('ASCII'))
 * else:
 */
__pyx_t_1 = ((__pyx_v_buf == NULL) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":249
 *
 * if buf == NULL:
 * result = array(shape, itemsize, format, mode.decode('ASCII')) # <<<<<<<<<<<<<<
 * else:
 * result = array(shape, itemsize, format, mode.decode('ASCII'),
 */
__pyx_t_2 = PyInt_FromSsize_t(__pyx_v_itemsize); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 249, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = __Pyx_PyBytes_FromString(__pyx_v_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 249, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_decode_c_string(__pyx_v_mode, 0, strlen(__pyx_v_mode), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 249, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
/* Pack the 4 positional args; PyTuple_SET_ITEM steals the references. */
__pyx_t_5 = PyTuple_New(4); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 249, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_INCREF(__pyx_v_shape);
__Pyx_GIVEREF(__pyx_v_shape);
PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_shape);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2);
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_3);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_5, 3, __pyx_t_4);
__pyx_t_2 = 0;
__pyx_t_3 = 0;
__pyx_t_4 = 0;
__pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)__pyx_array_type), __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 249, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_v_result = ((struct __pyx_array_obj *)__pyx_t_4);
__pyx_t_4 = 0;
/* "View.MemoryView":248
 * cdef array result
 *
 * if buf == NULL: # <<<<<<<<<<<<<<
 * result = array(shape, itemsize, format, mode.decode('ASCII'))
 * else:
 */
goto __pyx_L3;
}
/* "View.MemoryView":251
 * result = array(shape, itemsize, format, mode.decode('ASCII'))
 * else:
 * result = array(shape, itemsize, format, mode.decode('ASCII'), # <<<<<<<<<<<<<<
 * allocate_buffer=False)
 * result.data = buf
 */
/*else*/ {
__pyx_t_4 = PyInt_FromSsize_t(__pyx_v_itemsize); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 251, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_5 = __Pyx_PyBytes_FromString(__pyx_v_format); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 251, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_3 = __Pyx_decode_c_string(__pyx_v_mode, 0, strlen(__pyx_v_mode), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 251, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_2 = PyTuple_New(4); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 251, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_INCREF(__pyx_v_shape);
__Pyx_GIVEREF(__pyx_v_shape);
PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_shape);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4);
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_5);
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_t_3);
__pyx_t_4 = 0;
__pyx_t_5 = 0;
__pyx_t_3 = 0;
/* "View.MemoryView":252
 * else:
 * result = array(shape, itemsize, format, mode.decode('ASCII'),
 * allocate_buffer=False) # <<<<<<<<<<<<<<
 * result.data = buf
 *
 */
/* Keyword dict: {'allocate_buffer': False} — array must not allocate, we own the buffer. */
__pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 252, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_allocate_buffer, Py_False) < 0) __PYX_ERR(2, 252, __pyx_L1_error)
/* "View.MemoryView":251
 * result = array(shape, itemsize, format, mode.decode('ASCII'))
 * else:
 * result = array(shape, itemsize, format, mode.decode('ASCII'), # <<<<<<<<<<<<<<
 * allocate_buffer=False)
 * result.data = buf
 */
__pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)__pyx_array_type), __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 251, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_v_result = ((struct __pyx_array_obj *)__pyx_t_5);
__pyx_t_5 = 0;
/* "View.MemoryView":253
 * result = array(shape, itemsize, format, mode.decode('ASCII'),
 * allocate_buffer=False)
 * result.data = buf # <<<<<<<<<<<<<<
 *
 * return result
 */
__pyx_v_result->data = __pyx_v_buf;
}
__pyx_L3:;
/* "View.MemoryView":255
 * result.data = buf
 *
 * return result # <<<<<<<<<<<<<<
 *
 *
 */
__Pyx_XDECREF(((PyObject *)__pyx_r));
__Pyx_INCREF(((PyObject *)__pyx_v_result));
__pyx_r = __pyx_v_result;
goto __pyx_L0;
/* "View.MemoryView":244
 *
 * @cname("__pyx_array_new")
 * cdef array array_cwrapper(tuple shape, Py_ssize_t itemsize, char *format, # <<<<<<<<<<<<<<
 * char *mode, char *buf):
 * cdef array result
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("View.MemoryView.array_cwrapper", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_result);
__Pyx_XGIVEREF((PyObject *)__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":281
* cdef class Enum(object):
* cdef object name
* def __init__(self, name): # <<<<<<<<<<<<<<
* self.name = name
* def __repr__(self):
*/
/* Python wrapper */
/* Cython-generated `tp_init` wrapper for `Enum.__init__(self, name)`
 * (View.MemoryView:281).  Parses exactly one positional-or-keyword
 * argument `name` from args/kwds, then delegates to the impl.  Returns 0
 * on success, -1 on bad arguments (after raising via RaiseArgtupleInvalid
 * / ParseOptionalKeywords). */
static int __pyx_MemviewEnum___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_MemviewEnum___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_name = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_name,0};
PyObject* values[1] = {0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_name)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
}
/* Any keyword left over after matching `name` is unexpected -> error. */
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(2, 281, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 1) {
goto __pyx_L5_argtuple_error;
} else {
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
}
__pyx_v_name = values[0];
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 281, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("View.MemoryView.Enum.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
__pyx_r = __pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum___init__(((struct __pyx_MemviewEnum_obj *)__pyx_v_self), __pyx_v_name);
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `Enum.__init__`: stores `name` on the instance
 * (`self.name = name`) with the standard Cython attribute-assignment refcount
 * dance: incref the new value, release the old one, then overwrite.  Cannot fail;
 * always returns 0. */
static int __pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum___init__(struct __pyx_MemviewEnum_obj *__pyx_v_self, PyObject *__pyx_v_name) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__", 0);
/* "View.MemoryView":282
 * cdef object name
 * def __init__(self, name):
 * self.name = name # <<<<<<<<<<<<<<
 * def __repr__(self):
 * return self.name
 */
__Pyx_INCREF(__pyx_v_name);
__Pyx_GIVEREF(__pyx_v_name);
__Pyx_GOTREF(__pyx_v_self->name);
__Pyx_DECREF(__pyx_v_self->name);
__pyx_v_self->name = __pyx_v_name;
/* "View.MemoryView":281
 * cdef class Enum(object):
 * cdef object name
 * def __init__(self, name): # <<<<<<<<<<<<<<
 * self.name = name
 * def __repr__(self):
 */
/* function exit code */
__pyx_r = 0;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":283
* def __init__(self, name):
* self.name = name
* def __repr__(self): # <<<<<<<<<<<<<<
* return self.name
*
*/
/* Python wrapper */
/* Cython-generated `tp_repr` wrapper for `Enum.__repr__`; delegates to the impl. */
static PyObject *__pyx_MemviewEnum___repr__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_MemviewEnum___repr__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__repr__ (wrapper)", 0);
__pyx_r = __pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum_2__repr__(((struct __pyx_MemviewEnum_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `Enum.__repr__`: simply `return self.name` — increfs the stored
 * name and returns it as a new reference.  No failure path. */
static PyObject *__pyx_MemviewEnum___pyx_pf_15View_dot_MemoryView_4Enum_2__repr__(struct __pyx_MemviewEnum_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__repr__", 0);
/* "View.MemoryView":284
 * self.name = name
 * def __repr__(self):
 * return self.name # <<<<<<<<<<<<<<
 *
 * cdef generic = Enum("<strided and direct or indirect>")
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->name);
__pyx_r = __pyx_v_self->name;
goto __pyx_L0;
/* "View.MemoryView":283
 * def __init__(self, name):
 * self.name = name
 * def __repr__(self): # <<<<<<<<<<<<<<
 * return self.name
 *
 */
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* cdef tuple state
* cdef object _dict
*/
/* Python wrapper */
/* Cython-generated wrapper for `Enum.__reduce_cython__` (pickle support);
 * delegates to the implementation below. */
static PyObject *__pyx_pw___pyx_MemviewEnum_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw___pyx_MemviewEnum_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_MemviewEnum___reduce_cython__(((struct __pyx_MemviewEnum_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `Enum.__reduce_cython__` (auto-generated pickle protocol).
 * Builds state = (self.name,) [+ (self.__dict__,) if present], then returns either
 *   (__pyx_unpickle_Enum, (type(self), 0xb068931, None), state)   — setstate form, or
 *   (__pyx_unpickle_Enum, (type(self), 0xb068931, state))         — direct form,
 * depending on `use_setstate`.  0xb068931 (= 184977713) is the checksum of the
 * type's field layout used by the unpickler to detect mismatched versions.
 * Errors jump to `__pyx_L1_error`, release temporaries, and return NULL. */
static PyObject *__pyx_pf___pyx_MemviewEnum___reduce_cython__(struct __pyx_MemviewEnum_obj *__pyx_v_self) {
PyObject *__pyx_v_state = 0;
PyObject *__pyx_v__dict = 0;
int __pyx_v_use_setstate;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_t_3;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":5
 * cdef object _dict
 * cdef bint use_setstate
 * state = (self.name,) # <<<<<<<<<<<<<<
 * _dict = getattr(self, '__dict__', None)
 * if _dict is not None:
 */
__pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(__pyx_v_self->name);
__Pyx_GIVEREF(__pyx_v_self->name);
PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->name);
__pyx_v_state = ((PyObject*)__pyx_t_1);
__pyx_t_1 = 0;
/* "(tree fragment)":6
 * cdef bint use_setstate
 * state = (self.name,)
 * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<<
 * if _dict is not None:
 * state += (_dict,)
 */
__pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v__dict = __pyx_t_1;
__pyx_t_1 = 0;
/* "(tree fragment)":7
 * state = (self.name,)
 * _dict = getattr(self, '__dict__', None)
 * if _dict is not None: # <<<<<<<<<<<<<<
 * state += (_dict,)
 * use_setstate = True
 */
__pyx_t_2 = (__pyx_v__dict != Py_None);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* "(tree fragment)":8
 * _dict = getattr(self, '__dict__', None)
 * if _dict is not None:
 * state += (_dict,) # <<<<<<<<<<<<<<
 * use_setstate = True
 * else:
 */
__pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(__pyx_v__dict);
__Pyx_GIVEREF(__pyx_v__dict);
PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict);
__pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "(tree fragment)":9
 * if _dict is not None:
 * state += (_dict,)
 * use_setstate = True # <<<<<<<<<<<<<<
 * else:
 * use_setstate = self.name is not None
 */
__pyx_v_use_setstate = 1;
/* "(tree fragment)":7
 * state = (self.name,)
 * _dict = getattr(self, '__dict__', None)
 * if _dict is not None: # <<<<<<<<<<<<<<
 * state += (_dict,)
 * use_setstate = True
 */
goto __pyx_L3;
}
/* "(tree fragment)":11
 * use_setstate = True
 * else:
 * use_setstate = self.name is not None # <<<<<<<<<<<<<<
 * if use_setstate:
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, None), state
 */
/*else*/ {
__pyx_t_3 = (__pyx_v_self->name != Py_None);
__pyx_v_use_setstate = __pyx_t_3;
}
__pyx_L3:;
/* "(tree fragment)":12
 * else:
 * use_setstate = self.name is not None
 * if use_setstate: # <<<<<<<<<<<<<<
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, None), state
 * else:
 */
__pyx_t_3 = (__pyx_v_use_setstate != 0);
if (__pyx_t_3) {
/* "(tree fragment)":13
 * use_setstate = self.name is not None
 * if use_setstate:
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, None), state # <<<<<<<<<<<<<<
 * else:
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, state)
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_pyx_unpickle_Enum); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 13, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
/* Inner args tuple: (type(self), 0xb068931, None); SET_ITEM steals references. */
__pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 13, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
__Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
__Pyx_INCREF(__pyx_int_184977713);
__Pyx_GIVEREF(__pyx_int_184977713);
PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_184977713);
__Pyx_INCREF(Py_None);
__Pyx_GIVEREF(Py_None);
PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None);
__pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 13, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4);
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1);
__Pyx_INCREF(__pyx_v_state);
__Pyx_GIVEREF(__pyx_v_state);
PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state);
__pyx_t_4 = 0;
__pyx_t_1 = 0;
__pyx_r = __pyx_t_5;
__pyx_t_5 = 0;
goto __pyx_L0;
/* "(tree fragment)":12
 * else:
 * use_setstate = self.name is not None
 * if use_setstate: # <<<<<<<<<<<<<<
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, None), state
 * else:
 */
}
/* "(tree fragment)":15
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, None), state
 * else:
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, state) # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * __pyx_unpickle_Enum__set_state(self, __pyx_state)
 */
/*else*/ {
__Pyx_XDECREF(__pyx_r);
__Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_pyx_unpickle_Enum); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 15, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 15, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
__Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))));
__Pyx_INCREF(__pyx_int_184977713);
__Pyx_GIVEREF(__pyx_int_184977713);
PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_184977713);
__Pyx_INCREF(__pyx_v_state);
__Pyx_GIVEREF(__pyx_v_state);
PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state);
__pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 15, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_GIVEREF(__pyx_t_5);
PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5);
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1);
__pyx_t_5 = 0;
__pyx_t_1 = 0;
__pyx_r = __pyx_t_4;
__pyx_t_4 = 0;
goto __pyx_L0;
}
/* "(tree fragment)":1
 * def __reduce_cython__(self): # <<<<<<<<<<<<<<
 * cdef tuple state
 * cdef object _dict
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("View.MemoryView.Enum.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_state);
__Pyx_XDECREF(__pyx_v__dict);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":16
* else:
* return __pyx_unpickle_Enum, (type(self), 0xb068931, state)
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* __pyx_unpickle_Enum__set_state(self, __pyx_state)
*/
/* Python wrapper */
/* Cython-generated wrapper for `Enum.__setstate_cython__`; delegates to the impl. */
static PyObject *__pyx_pw___pyx_MemviewEnum_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw___pyx_MemviewEnum_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_MemviewEnum_2__setstate_cython__(((struct __pyx_MemviewEnum_obj *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of `Enum.__setstate_cython__`: type-checks that `__pyx_state`
 * is a tuple (or None), then applies it via __pyx_unpickle_Enum__set_state.
 * Returns None on success, NULL (via `__pyx_L1_error`) on type error or if
 * set_state fails. */
static PyObject *__pyx_pf___pyx_MemviewEnum_2__setstate_cython__(struct __pyx_MemviewEnum_obj *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":17
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, state)
 * def __setstate_cython__(self, __pyx_state):
 * __pyx_unpickle_Enum__set_state(self, __pyx_state) # <<<<<<<<<<<<<<
 */
if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 17, __pyx_L1_error)
__pyx_t_1 = __pyx_unpickle_Enum__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 17, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "(tree fragment)":16
 * else:
 * return __pyx_unpickle_Enum, (type(self), 0xb068931, state)
 * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
 * __pyx_unpickle_Enum__set_state(self, __pyx_state)
 */
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.Enum.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":298
*
* @cname('__pyx_align_pointer')
* cdef void *align_pointer(void *memory, size_t alignment) nogil: # <<<<<<<<<<<<<<
* "Align pointer memory on a given boundary"
* cdef Py_intptr_t aligned_p = <Py_intptr_t> memory
*/
static void *__pyx_align_pointer(void *__pyx_v_memory, size_t __pyx_v_alignment) {
Py_intptr_t __pyx_v_aligned_p;
size_t __pyx_v_offset;
void *__pyx_r;
int __pyx_t_1;
/* "View.MemoryView":300
* cdef void *align_pointer(void *memory, size_t alignment) nogil:
* "Align pointer memory on a given boundary"
* cdef Py_intptr_t aligned_p = <Py_intptr_t> memory # <<<<<<<<<<<<<<
* cdef size_t offset
*
*/
__pyx_v_aligned_p = ((Py_intptr_t)__pyx_v_memory);
/* "View.MemoryView":304
*
* with cython.cdivision(True):
* offset = aligned_p % alignment # <<<<<<<<<<<<<<
*
* if offset > 0:
*/
__pyx_v_offset = (__pyx_v_aligned_p % __pyx_v_alignment);
/* "View.MemoryView":306
* offset = aligned_p % alignment
*
* if offset > 0: # <<<<<<<<<<<<<<
* aligned_p += alignment - offset
*
*/
__pyx_t_1 = ((__pyx_v_offset > 0) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":307
*
* if offset > 0:
* aligned_p += alignment - offset # <<<<<<<<<<<<<<
*
* return <void *> aligned_p
*/
__pyx_v_aligned_p = (__pyx_v_aligned_p + (__pyx_v_alignment - __pyx_v_offset));
/* "View.MemoryView":306
* offset = aligned_p % alignment
*
* if offset > 0: # <<<<<<<<<<<<<<
* aligned_p += alignment - offset
*
*/
}
/* "View.MemoryView":309
* aligned_p += alignment - offset
*
* return <void *> aligned_p # <<<<<<<<<<<<<<
*
*
*/
__pyx_r = ((void *)__pyx_v_aligned_p);
goto __pyx_L0;
/* "View.MemoryView":298
*
* @cname('__pyx_align_pointer')
* cdef void *align_pointer(void *memory, size_t alignment) nogil: # <<<<<<<<<<<<<<
* "Align pointer memory on a given boundary"
* cdef Py_intptr_t aligned_p = <Py_intptr_t> memory
*/
/* function exit code */
__pyx_L0:;
return __pyx_r;
}
/* "View.MemoryView":345
* cdef __Pyx_TypeInfo *typeinfo
*
* def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False): # <<<<<<<<<<<<<<
* self.obj = obj
* self.flags = flags
*/
/* Python wrapper */
/* Cython-generated `tp_new`-stage wrapper for
 * `memoryview.__cinit__(self, obj, flags, dtype_is_object=False)` (View.MemoryView:345).
 * Parses 2 required arguments (`obj`, `flags`) and one optional keyword
 * (`dtype_is_object`, defaulting to 0), converts `flags` to C int and
 * `dtype_is_object` to a C truth value, then delegates to the impl.
 * Returns 0 on success, -1 on argument or conversion error. */
static int __pyx_memoryview___cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_memoryview___cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_obj = 0;
int __pyx_v_flags;
int __pyx_v_dtype_is_object;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_obj,&__pyx_n_s_flags,&__pyx_n_s_dtype_is_object,0};
PyObject* values[3] = {0,0,0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
/* First collect positional args, then fill the gaps from keywords. */
switch (pos_args) {
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
CYTHON_FALLTHROUGH;
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_obj)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
CYTHON_FALLTHROUGH;
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_flags)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 0, 2, 3, 1); __PYX_ERR(2, 345, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 2:
if (kw_args > 0) {
PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dtype_is_object);
if (value) { values[2] = value; kw_args--; }
}
}
/* Leftover keywords are unexpected -> TypeError. */
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(2, 345, __pyx_L3_error)
}
} else {
switch (PyTuple_GET_SIZE(__pyx_args)) {
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
break;
default: goto __pyx_L5_argtuple_error;
}
}
__pyx_v_obj = values[0];
__pyx_v_flags = __Pyx_PyInt_As_int(values[1]); if (unlikely((__pyx_v_flags == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 345, __pyx_L3_error)
if (values[2]) {
__pyx_v_dtype_is_object = __Pyx_PyObject_IsTrue(values[2]); if (unlikely((__pyx_v_dtype_is_object == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 345, __pyx_L3_error)
} else {
/* Default: dtype_is_object=False. */
__pyx_v_dtype_is_object = ((int)0);
}
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__cinit__", 0, 2, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 345, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("View.MemoryView.memoryview.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview___cinit__(((struct __pyx_memoryview_obj *)__pyx_v_self), __pyx_v_obj, __pyx_v_flags, __pyx_v_dtype_is_object);
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Cython-generated implementation of memoryview.__cinit__.
 *
 * Stores the exporting object and the requested buffer flags on self,
 * acquires the underlying Py_buffer via __Pyx_GetBuffer when self is a
 * plain memoryview or obj is not None, takes a thread lock from the
 * preallocated pool (falling back to PyThread_allocate_lock, raising
 * MemoryError on failure), and initialises dtype_is_object — from the
 * buffer format string ("O\0") when PyBUF_FORMAT was requested,
 * otherwise from the explicit dtype_is_object argument.  Finally aligns
 * the acquisition-count pointer and NULLs typeinfo.
 *
 * Returns 0 on success, -1 after setting a Python exception.
 * NOTE: do not hand-edit — regenerated by Cython from View.MemoryView. */
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview___cinit__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_obj, int __pyx_v_flags, int __pyx_v_dtype_is_object) {
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  int __pyx_t_3;
  int __pyx_t_4;
  __Pyx_RefNannySetupContext("__cinit__", 0);
  /* "View.MemoryView":346
 *
 *     def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False):
 *         self.obj = obj             # <<<<<<<<<<<<<<
 *         self.flags = flags
 *         if type(self) is memoryview or obj is not None:
 */
  __Pyx_INCREF(__pyx_v_obj);
  __Pyx_GIVEREF(__pyx_v_obj);
  __Pyx_GOTREF(__pyx_v_self->obj);
  __Pyx_DECREF(__pyx_v_self->obj);
  __pyx_v_self->obj = __pyx_v_obj;
  /* "View.MemoryView":347
 *     def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False):
 *         self.obj = obj
 *         self.flags = flags             # <<<<<<<<<<<<<<
 *         if type(self) is memoryview or obj is not None:
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 */
  __pyx_v_self->flags = __pyx_v_flags;
  /* "View.MemoryView":348
 *         self.obj = obj
 *         self.flags = flags
 *         if type(self) is memoryview or obj is not None:             # <<<<<<<<<<<<<<
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 *             if <PyObject *> self.view.obj == NULL:
 */
  /* Short-circuit `or`: first test exact type match, then obj is not None. */
  __pyx_t_2 = (((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))) == ((PyObject *)__pyx_memoryview_type));
  __pyx_t_3 = (__pyx_t_2 != 0);
  if (!__pyx_t_3) {
  } else {
    __pyx_t_1 = __pyx_t_3;
    goto __pyx_L4_bool_binop_done;
  }
  __pyx_t_3 = (__pyx_v_obj != Py_None);
  __pyx_t_2 = (__pyx_t_3 != 0);
  __pyx_t_1 = __pyx_t_2;
  __pyx_L4_bool_binop_done:;
  if (__pyx_t_1) {
    /* "View.MemoryView":349
 *         self.flags = flags
 *         if type(self) is memoryview or obj is not None:
 *             __Pyx_GetBuffer(obj, &self.view, flags)             # <<<<<<<<<<<<<<
 *             if <PyObject *> self.view.obj == NULL:
 *                 (<__pyx_buffer *> &self.view).obj = Py_None
 */
    __pyx_t_4 = __Pyx_GetBuffer(__pyx_v_obj, (&__pyx_v_self->view), __pyx_v_flags); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 349, __pyx_L1_error)
    /* "View.MemoryView":350
 *         if type(self) is memoryview or obj is not None:
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 *             if <PyObject *> self.view.obj == NULL:             # <<<<<<<<<<<<<<
 *                 (<__pyx_buffer *> &self.view).obj = Py_None
 *                 Py_INCREF(Py_None)
 */
    __pyx_t_1 = ((((PyObject *)__pyx_v_self->view.obj) == NULL) != 0);
    if (__pyx_t_1) {
      /* "View.MemoryView":351
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 *             if <PyObject *> self.view.obj == NULL:
 *                 (<__pyx_buffer *> &self.view).obj = Py_None             # <<<<<<<<<<<<<<
 *                 Py_INCREF(Py_None)
 *
 */
      /* Exporter left view.obj NULL: substitute an owned Py_None so the
       * matching __dealloc__ branch can recognise and release it. */
      ((Py_buffer *)(&__pyx_v_self->view))->obj = Py_None;
      /* "View.MemoryView":352
 *             if <PyObject *> self.view.obj == NULL:
 *                 (<__pyx_buffer *> &self.view).obj = Py_None
 *                 Py_INCREF(Py_None)             # <<<<<<<<<<<<<<
 *
 *         global __pyx_memoryview_thread_locks_used
 */
      Py_INCREF(Py_None);
      /* "View.MemoryView":350
 *         if type(self) is memoryview or obj is not None:
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 *             if <PyObject *> self.view.obj == NULL:             # <<<<<<<<<<<<<<
 *                 (<__pyx_buffer *> &self.view).obj = Py_None
 *                 Py_INCREF(Py_None)
 */
    }
    /* "View.MemoryView":348
 *         self.obj = obj
 *         self.flags = flags
 *         if type(self) is memoryview or obj is not None:             # <<<<<<<<<<<<<<
 *             __Pyx_GetBuffer(obj, &self.view, flags)
 *             if <PyObject *> self.view.obj == NULL:
 */
  }
  /* "View.MemoryView":355
 *
 *         global __pyx_memoryview_thread_locks_used
 *         if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:             # <<<<<<<<<<<<<<
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
 *             __pyx_memoryview_thread_locks_used += 1
 */
  /* 8 is the inlined value of THREAD_LOCKS_PREALLOCATED. */
  __pyx_t_1 = ((__pyx_memoryview_thread_locks_used < 8) != 0);
  if (__pyx_t_1) {
    /* "View.MemoryView":356
 *         global __pyx_memoryview_thread_locks_used
 *         if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]             # <<<<<<<<<<<<<<
 *             __pyx_memoryview_thread_locks_used += 1
 *             if self.lock is NULL:
 */
    __pyx_v_self->lock = (__pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]);
    /* "View.MemoryView":357
 *         if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
 *             __pyx_memoryview_thread_locks_used += 1             # <<<<<<<<<<<<<<
 *             if self.lock is NULL:
 *                 self.lock = PyThread_allocate_lock()
 */
    __pyx_memoryview_thread_locks_used = (__pyx_memoryview_thread_locks_used + 1);
    /* "View.MemoryView":355
 *
 *         global __pyx_memoryview_thread_locks_used
 *         if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:             # <<<<<<<<<<<<<<
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
 *             __pyx_memoryview_thread_locks_used += 1
 */
  }
  /* "View.MemoryView":358
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
 *             __pyx_memoryview_thread_locks_used += 1
 *         if self.lock is NULL:             # <<<<<<<<<<<<<<
 *             self.lock = PyThread_allocate_lock()
 *             if self.lock is NULL:
 */
  __pyx_t_1 = ((__pyx_v_self->lock == NULL) != 0);
  if (__pyx_t_1) {
    /* "View.MemoryView":359
 *             __pyx_memoryview_thread_locks_used += 1
 *         if self.lock is NULL:
 *             self.lock = PyThread_allocate_lock()             # <<<<<<<<<<<<<<
 *             if self.lock is NULL:
 *                 raise MemoryError
 */
    __pyx_v_self->lock = PyThread_allocate_lock();
    /* "View.MemoryView":360
 *         if self.lock is NULL:
 *             self.lock = PyThread_allocate_lock()
 *             if self.lock is NULL:             # <<<<<<<<<<<<<<
 *                 raise MemoryError
 *
 */
    __pyx_t_1 = ((__pyx_v_self->lock == NULL) != 0);
    if (unlikely(__pyx_t_1)) {
      /* "View.MemoryView":361
 *             self.lock = PyThread_allocate_lock()
 *             if self.lock is NULL:
 *                 raise MemoryError             # <<<<<<<<<<<<<<
 *
 *         if flags & PyBUF_FORMAT:
 */
      PyErr_NoMemory(); __PYX_ERR(2, 361, __pyx_L1_error)
      /* "View.MemoryView":360
 *         if self.lock is NULL:
 *             self.lock = PyThread_allocate_lock()
 *             if self.lock is NULL:             # <<<<<<<<<<<<<<
 *                 raise MemoryError
 *
 */
    }
    /* "View.MemoryView":358
 *             self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
 *             __pyx_memoryview_thread_locks_used += 1
 *         if self.lock is NULL:             # <<<<<<<<<<<<<<
 *             self.lock = PyThread_allocate_lock()
 *             if self.lock is NULL:
 */
  }
  /* "View.MemoryView":363
 *                 raise MemoryError
 *
 *         if flags & PyBUF_FORMAT:             # <<<<<<<<<<<<<<
 *             self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
 *         else:
 */
  __pyx_t_1 = ((__pyx_v_flags & PyBUF_FORMAT) != 0);
  if (__pyx_t_1) {
    /* "View.MemoryView":364
 *
 *         if flags & PyBUF_FORMAT:
 *             self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')             # <<<<<<<<<<<<<<
 *         else:
 *             self.dtype_is_object = dtype_is_object
 */
    /* dtype is "object" iff the buffer format string is exactly "O". */
    __pyx_t_2 = (((__pyx_v_self->view.format[0]) == 'O') != 0);
    if (__pyx_t_2) {
    } else {
      __pyx_t_1 = __pyx_t_2;
      goto __pyx_L11_bool_binop_done;
    }
    __pyx_t_2 = (((__pyx_v_self->view.format[1]) == '\x00') != 0);
    __pyx_t_1 = __pyx_t_2;
    __pyx_L11_bool_binop_done:;
    __pyx_v_self->dtype_is_object = __pyx_t_1;
    /* "View.MemoryView":363
 *                 raise MemoryError
 *
 *         if flags & PyBUF_FORMAT:             # <<<<<<<<<<<<<<
 *             self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
 *         else:
 */
    goto __pyx_L10;
  }
  /* "View.MemoryView":366
 *             self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
 *         else:
 *             self.dtype_is_object = dtype_is_object             # <<<<<<<<<<<<<<
 *
 *         self.acquisition_count_aligned_p = <__pyx_atomic_int *> align_pointer(
 */
  /*else*/ {
    __pyx_v_self->dtype_is_object = __pyx_v_dtype_is_object;
  }
  __pyx_L10:;
  /* "View.MemoryView":368
 *             self.dtype_is_object = dtype_is_object
 *
 *         self.acquisition_count_aligned_p = <__pyx_atomic_int *> align_pointer(             # <<<<<<<<<<<<<<
 *             <void *> &self.acquisition_count[0], sizeof(__pyx_atomic_int))
 *         self.typeinfo = NULL
 */
  __pyx_v_self->acquisition_count_aligned_p = ((__pyx_atomic_int *)__pyx_align_pointer(((void *)(&(__pyx_v_self->acquisition_count[0]))), (sizeof(__pyx_atomic_int))));
  /* "View.MemoryView":370
 *         self.acquisition_count_aligned_p = <__pyx_atomic_int *> align_pointer(
 *             <void *> &self.acquisition_count[0], sizeof(__pyx_atomic_int))
 *         self.typeinfo = NULL             # <<<<<<<<<<<<<<
 *
 *     def __dealloc__(memoryview self):
 */
  __pyx_v_self->typeinfo = NULL;
  /* "View.MemoryView":345
 *     cdef __Pyx_TypeInfo *typeinfo
 *
 *     def __cinit__(memoryview self, object obj, int flags, bint dtype_is_object=False):             # <<<<<<<<<<<<<<
 *         self.obj = obj
 *         self.flags = flags
 */
  /* function exit code */
  __pyx_r = 0;
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_AddTraceback("View.MemoryView.memoryview.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":372
 *         self.typeinfo = NULL
 *
 *     def __dealloc__(memoryview self):             # <<<<<<<<<<<<<<
 *         if self.obj is not None:
 *             __Pyx_ReleaseBuffer(&self.view)
 */
/* Python wrapper */
/* tp_dealloc slot thunk: casts self to the concrete memoryview struct and
 * delegates to the generated __dealloc__ implementation below. */
static void __pyx_memoryview___dealloc__(PyObject *__pyx_v_self); /*proto*/
static void __pyx_memoryview___dealloc__(PyObject *__pyx_v_self) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
  __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_2__dealloc__(((struct __pyx_memoryview_obj *)__pyx_v_self));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* Cython-generated implementation of memoryview.__dealloc__.
 *
 * Releases the Py_buffer when a real exporter is held (self.obj is not
 * None), or drops the substitute Py_None reference installed by
 * __cinit__ when view.obj == Py_None.  Then returns self.lock to the
 * preallocated pool if it came from there (swapping it to the end of
 * the live region and shrinking the used count), otherwise frees it
 * with PyThread_free_lock.
 * NOTE: do not hand-edit — regenerated by Cython from View.MemoryView. */
static void __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_2__dealloc__(struct __pyx_memoryview_obj *__pyx_v_self) {
  int __pyx_v_i;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  int __pyx_t_3;
  int __pyx_t_4;
  int __pyx_t_5;
  PyThread_type_lock __pyx_t_6;
  PyThread_type_lock __pyx_t_7;
  __Pyx_RefNannySetupContext("__dealloc__", 0);
  /* "View.MemoryView":373
 *
 *     def __dealloc__(memoryview self):
 *         if self.obj is not None:             # <<<<<<<<<<<<<<
 *             __Pyx_ReleaseBuffer(&self.view)
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:
 */
  __pyx_t_1 = (__pyx_v_self->obj != Py_None);
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":374
 *     def __dealloc__(memoryview self):
 *         if self.obj is not None:
 *             __Pyx_ReleaseBuffer(&self.view)             # <<<<<<<<<<<<<<
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:
 *
 */
    __Pyx_ReleaseBuffer((&__pyx_v_self->view));
    /* "View.MemoryView":373
 *
 *     def __dealloc__(memoryview self):
 *         if self.obj is not None:             # <<<<<<<<<<<<<<
 *             __Pyx_ReleaseBuffer(&self.view)
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:
 */
    goto __pyx_L3;
  }
  /* "View.MemoryView":375
 *         if self.obj is not None:
 *             __Pyx_ReleaseBuffer(&self.view)
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:             # <<<<<<<<<<<<<<
 *
 *             (<__pyx_buffer *> &self.view).obj = NULL
 */
  /* Undo __cinit__'s Py_None placeholder: clear the slot and drop the ref. */
  __pyx_t_2 = ((((Py_buffer *)(&__pyx_v_self->view))->obj == Py_None) != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":377
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:
 *
 *             (<__pyx_buffer *> &self.view).obj = NULL             # <<<<<<<<<<<<<<
 *             Py_DECREF(Py_None)
 *
 */
    ((Py_buffer *)(&__pyx_v_self->view))->obj = NULL;
    /* "View.MemoryView":378
 *
 *             (<__pyx_buffer *> &self.view).obj = NULL
 *             Py_DECREF(Py_None)             # <<<<<<<<<<<<<<
 *
 *         cdef int i
 */
    Py_DECREF(Py_None);
    /* "View.MemoryView":375
 *         if self.obj is not None:
 *             __Pyx_ReleaseBuffer(&self.view)
 *         elif (<__pyx_buffer *> &self.view).obj == Py_None:             # <<<<<<<<<<<<<<
 *
 *             (<__pyx_buffer *> &self.view).obj = NULL
 */
  }
  __pyx_L3:;
  /* "View.MemoryView":382
 *         cdef int i
 *         global __pyx_memoryview_thread_locks_used
 *         if self.lock != NULL:             # <<<<<<<<<<<<<<
 *             for i in range(__pyx_memoryview_thread_locks_used):
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 */
  __pyx_t_2 = ((__pyx_v_self->lock != NULL) != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":383
 *         global __pyx_memoryview_thread_locks_used
 *         if self.lock != NULL:
 *             for i in range(__pyx_memoryview_thread_locks_used):             # <<<<<<<<<<<<<<
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 *                     __pyx_memoryview_thread_locks_used -= 1
 */
    __pyx_t_3 = __pyx_memoryview_thread_locks_used;
    __pyx_t_4 = __pyx_t_3;
    for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) {
      __pyx_v_i = __pyx_t_5;
      /* "View.MemoryView":384
 *         if self.lock != NULL:
 *             for i in range(__pyx_memoryview_thread_locks_used):
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:             # <<<<<<<<<<<<<<
 *                     __pyx_memoryview_thread_locks_used -= 1
 *                     if i != __pyx_memoryview_thread_locks_used:
 */
      __pyx_t_2 = (((__pyx_memoryview_thread_locks[__pyx_v_i]) == __pyx_v_self->lock) != 0);
      if (__pyx_t_2) {
        /* "View.MemoryView":385
 *             for i in range(__pyx_memoryview_thread_locks_used):
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 *                     __pyx_memoryview_thread_locks_used -= 1             # <<<<<<<<<<<<<<
 *                     if i != __pyx_memoryview_thread_locks_used:
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
 */
        __pyx_memoryview_thread_locks_used = (__pyx_memoryview_thread_locks_used - 1);
        /* "View.MemoryView":386
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 *                     __pyx_memoryview_thread_locks_used -= 1
 *                     if i != __pyx_memoryview_thread_locks_used:             # <<<<<<<<<<<<<<
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
 *                             __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
 */
        /* Swap this lock with the last live pool entry so the pool stays
         * compact after the used count was decremented above. */
        __pyx_t_2 = ((__pyx_v_i != __pyx_memoryview_thread_locks_used) != 0);
        if (__pyx_t_2) {
          /* "View.MemoryView":388
 *                     if i != __pyx_memoryview_thread_locks_used:
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
 *                             __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])             # <<<<<<<<<<<<<<
 *                         break
 *                     else:
 */
          __pyx_t_6 = (__pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]);
          __pyx_t_7 = (__pyx_memoryview_thread_locks[__pyx_v_i]);
          /* "View.MemoryView":387
 *                     __pyx_memoryview_thread_locks_used -= 1
 *                     if i != __pyx_memoryview_thread_locks_used:
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (             # <<<<<<<<<<<<<<
 *                             __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
 *                         break
 */
          (__pyx_memoryview_thread_locks[__pyx_v_i]) = __pyx_t_6;
          (__pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]) = __pyx_t_7;
          /* "View.MemoryView":386
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 *                     __pyx_memoryview_thread_locks_used -= 1
 *                     if i != __pyx_memoryview_thread_locks_used:             # <<<<<<<<<<<<<<
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
 *                             __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
 */
        }
        /* "View.MemoryView":389
 *                         __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = (
 *                             __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
 *                         break             # <<<<<<<<<<<<<<
 *                     else:
 *                         PyThread_free_lock(self.lock)
 */
        goto __pyx_L6_break;
        /* "View.MemoryView":384
 *         if self.lock != NULL:
 *             for i in range(__pyx_memoryview_thread_locks_used):
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:             # <<<<<<<<<<<<<<
 *                     __pyx_memoryview_thread_locks_used -= 1
 *                     if i != __pyx_memoryview_thread_locks_used:
 */
      }
    }
    /*else*/ {
      /* "View.MemoryView":391
 *                         break
 *                 else:
 *                     PyThread_free_lock(self.lock)             # <<<<<<<<<<<<<<
 *
 *     cdef char *get_item_pointer(memoryview self, object index) except NULL:
 */
      /* for/else: the lock was not found in the pool, so it was allocated
       * by PyThread_allocate_lock in __cinit__ and must be freed here. */
      PyThread_free_lock(__pyx_v_self->lock);
    }
    __pyx_L6_break:;
    /* "View.MemoryView":382
 *         cdef int i
 *         global __pyx_memoryview_thread_locks_used
 *         if self.lock != NULL:             # <<<<<<<<<<<<<<
 *             for i in range(__pyx_memoryview_thread_locks_used):
 *                 if __pyx_memoryview_thread_locks[i] is self.lock:
 */
  }
  /* "View.MemoryView":372
 *         self.typeinfo = NULL
 *
 *     def __dealloc__(memoryview self):             # <<<<<<<<<<<<<<
 *         if self.obj is not None:
 *             __Pyx_ReleaseBuffer(&self.view)
 */
  /* function exit code */
  __Pyx_RefNannyFinishContext();
}
/* "View.MemoryView":393
 *                     PyThread_free_lock(self.lock)
 *
 *     cdef char *get_item_pointer(memoryview self, object index) except NULL:             # <<<<<<<<<<<<<<
 *         cdef Py_ssize_t dim
 *         cdef char *itemp = <char *> self.view.buf
 */
/* Cython-generated implementation of memoryview.get_item_pointer.
 *
 * Starting from the buffer's base pointer (view.buf), iterates over the
 * per-dimension indices in `index` (enumerate over a list/tuple fast
 * path or a generic iterator) and advances the pointer through
 * __pyx_pybuffer_index for each dimension.
 *
 * Returns the resulting element pointer, or NULL with an exception set
 * (`except NULL` contract) if an index is invalid.
 * NOTE: do not hand-edit — regenerated by Cython from View.MemoryView. */
static char *__pyx_memoryview_get_item_pointer(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index) {
  Py_ssize_t __pyx_v_dim;
  char *__pyx_v_itemp;
  PyObject *__pyx_v_idx = NULL;
  char *__pyx_r;
  __Pyx_RefNannyDeclarations
  Py_ssize_t __pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  Py_ssize_t __pyx_t_3;
  PyObject *(*__pyx_t_4)(PyObject *);
  PyObject *__pyx_t_5 = NULL;
  Py_ssize_t __pyx_t_6;
  char *__pyx_t_7;
  __Pyx_RefNannySetupContext("get_item_pointer", 0);
  /* "View.MemoryView":395
 *     cdef char *get_item_pointer(memoryview self, object index) except NULL:
 *         cdef Py_ssize_t dim
 *         cdef char *itemp = <char *> self.view.buf             # <<<<<<<<<<<<<<
 *
 *         for dim, idx in enumerate(index):
 */
  __pyx_v_itemp = ((char *)__pyx_v_self->view.buf);
  /* "View.MemoryView":397
 *         cdef char *itemp = <char *> self.view.buf
 *
 *         for dim, idx in enumerate(index):             # <<<<<<<<<<<<<<
 *             itemp = pybuffer_index(&self.view, itemp, idx, dim)
 *
 */
  /* Fast path: direct PyList/PyTuple item access; otherwise fall back to
   * the generic iterator protocol via tp_iternext. */
  __pyx_t_1 = 0;
  if (likely(PyList_CheckExact(__pyx_v_index)) || PyTuple_CheckExact(__pyx_v_index)) {
    __pyx_t_2 = __pyx_v_index; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0;
    __pyx_t_4 = NULL;
  } else {
    __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 397, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 397, __pyx_L1_error)
  }
  for (;;) {
    if (likely(!__pyx_t_4)) {
      if (likely(PyList_CheckExact(__pyx_t_2))) {
        if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break;
        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
        __pyx_t_5 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(2, 397, __pyx_L1_error)
        #else
        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 397, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_5);
        #endif
      } else {
        if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_2)) break;
        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
        __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(2, 397, __pyx_L1_error)
        #else
        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 397, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_5);
        #endif
      }
    } else {
      __pyx_t_5 = __pyx_t_4(__pyx_t_2);
      if (unlikely(!__pyx_t_5)) {
        PyObject* exc_type = PyErr_Occurred();
        if (exc_type) {
          /* StopIteration means normal exhaustion; anything else propagates. */
          if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
          else __PYX_ERR(2, 397, __pyx_L1_error)
        }
        break;
      }
      __Pyx_GOTREF(__pyx_t_5);
    }
    __Pyx_XDECREF_SET(__pyx_v_idx, __pyx_t_5);
    __pyx_t_5 = 0;
    __pyx_v_dim = __pyx_t_1;
    __pyx_t_1 = (__pyx_t_1 + 1);
    /* "View.MemoryView":398
 *
 *         for dim, idx in enumerate(index):
 *             itemp = pybuffer_index(&self.view, itemp, idx, dim)             # <<<<<<<<<<<<<<
 *
 *         return itemp
 */
    __pyx_t_6 = __Pyx_PyIndex_AsSsize_t(__pyx_v_idx); if (unlikely((__pyx_t_6 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 398, __pyx_L1_error)
    __pyx_t_7 = __pyx_pybuffer_index((&__pyx_v_self->view), __pyx_v_itemp, __pyx_t_6, __pyx_v_dim); if (unlikely(__pyx_t_7 == ((char *)NULL))) __PYX_ERR(2, 398, __pyx_L1_error)
    __pyx_v_itemp = __pyx_t_7;
    /* "View.MemoryView":397
 *         cdef char *itemp = <char *> self.view.buf
 *
 *         for dim, idx in enumerate(index):             # <<<<<<<<<<<<<<
 *             itemp = pybuffer_index(&self.view, itemp, idx, dim)
 *
 */
  }
  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  /* "View.MemoryView":400
 *             itemp = pybuffer_index(&self.view, itemp, idx, dim)
 *
 *         return itemp             # <<<<<<<<<<<<<<
 *
 *
 */
  __pyx_r = __pyx_v_itemp;
  goto __pyx_L0;
  /* "View.MemoryView":393
 *                     PyThread_free_lock(self.lock)
 *
 *     cdef char *get_item_pointer(memoryview self, object index) except NULL:             # <<<<<<<<<<<<<<
 *         cdef Py_ssize_t dim
 *         cdef char *itemp = <char *> self.view.buf
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("View.MemoryView.memoryview.get_item_pointer", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XDECREF(__pyx_v_idx);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":403
 *
 *
 *     def __getitem__(memoryview self, object index):             # <<<<<<<<<<<<<<
 *         if index is Ellipsis:
 *             return self
 */
/* Python wrapper */
/* mp_subscript slot thunk: casts self/index and delegates to the
 * generated __getitem__ implementation below. */
static PyObject *__pyx_memoryview___getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/
static PyObject *__pyx_memoryview___getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) {
  PyObject *__pyx_r = 0;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0);
  __pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_4__getitem__(((struct __pyx_memoryview_obj *)__pyx_v_self), ((PyObject *)__pyx_v_index));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* Cython-generated implementation of memoryview.__getitem__.
 *
 * `mv[...]` returns self unchanged.  Otherwise the index is normalised
 * by _unellipsify into (have_slices, indices); slice indices produce a
 * new sliced memoryview via memview_slice, while purely integral
 * indices resolve to an element pointer (get_item_pointer) converted to
 * a Python object with convert_item_to_object.
 *
 * Returns a new reference, or NULL with an exception set.
 * NOTE: do not hand-edit — regenerated by Cython from View.MemoryView. */
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_4__getitem__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index) {
  PyObject *__pyx_v_have_slices = NULL;
  PyObject *__pyx_v_indices = NULL;
  char *__pyx_v_itemp;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  char *__pyx_t_6;
  __Pyx_RefNannySetupContext("__getitem__", 0);
  /* "View.MemoryView":404
 *
 *     def __getitem__(memoryview self, object index):
 *         if index is Ellipsis:             # <<<<<<<<<<<<<<
 *             return self
 *
 */
  __pyx_t_1 = (__pyx_v_index == __pyx_builtin_Ellipsis);
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":405
 *     def __getitem__(memoryview self, object index):
 *         if index is Ellipsis:
 *             return self             # <<<<<<<<<<<<<<
 *
 *         have_slices, indices = _unellipsify(index, self.view.ndim)
 */
    __Pyx_XDECREF(__pyx_r);
    __Pyx_INCREF(((PyObject *)__pyx_v_self));
    __pyx_r = ((PyObject *)__pyx_v_self);
    goto __pyx_L0;
    /* "View.MemoryView":404
 *
 *     def __getitem__(memoryview self, object index):
 *         if index is Ellipsis:             # <<<<<<<<<<<<<<
 *             return self
 *
 */
  }
  /* "View.MemoryView":407
 *             return self
 *
 *         have_slices, indices = _unellipsify(index, self.view.ndim)             # <<<<<<<<<<<<<<
 *
 *         cdef char *itemp
 */
  /* Tuple-unpack the 2-tuple returned by _unellipsify. */
  __pyx_t_3 = _unellipsify(__pyx_v_index, __pyx_v_self->view.ndim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 407, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_3);
  if (likely(__pyx_t_3 != Py_None)) {
    PyObject* sequence = __pyx_t_3;
    Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
    if (unlikely(size != 2)) {
      if (size > 2) __Pyx_RaiseTooManyValuesError(2);
      else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
      __PYX_ERR(2, 407, __pyx_L1_error)
    }
    #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
    __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0);
    __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1);
    __Pyx_INCREF(__pyx_t_4);
    __Pyx_INCREF(__pyx_t_5);
    #else
    __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 407, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_4);
    __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 407, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_5);
    #endif
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
  } else {
    __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(2, 407, __pyx_L1_error)
  }
  __pyx_v_have_slices = __pyx_t_4;
  __pyx_t_4 = 0;
  __pyx_v_indices = __pyx_t_5;
  __pyx_t_5 = 0;
  /* "View.MemoryView":410
 *
 *         cdef char *itemp
 *         if have_slices:             # <<<<<<<<<<<<<<
 *             return memview_slice(self, indices)
 *         else:
 */
  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_have_slices); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(2, 410, __pyx_L1_error)
  if (__pyx_t_2) {
    /* "View.MemoryView":411
 *         cdef char *itemp
 *         if have_slices:
 *             return memview_slice(self, indices)             # <<<<<<<<<<<<<<
 *         else:
 *             itemp = self.get_item_pointer(indices)
 */
    __Pyx_XDECREF(__pyx_r);
    __pyx_t_3 = ((PyObject *)__pyx_memview_slice(__pyx_v_self, __pyx_v_indices)); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 411, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __pyx_r = __pyx_t_3;
    __pyx_t_3 = 0;
    goto __pyx_L0;
    /* "View.MemoryView":410
 *
 *         cdef char *itemp
 *         if have_slices:             # <<<<<<<<<<<<<<
 *             return memview_slice(self, indices)
 *         else:
 */
  }
  /* "View.MemoryView":413
 *             return memview_slice(self, indices)
 *         else:
 *             itemp = self.get_item_pointer(indices)             # <<<<<<<<<<<<<<
 *             return self.convert_item_to_object(itemp)
 *
 */
  /*else*/ {
    /* cdef methods are dispatched through the class vtab, not Python attrs. */
    __pyx_t_6 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->get_item_pointer(__pyx_v_self, __pyx_v_indices); if (unlikely(__pyx_t_6 == ((char *)NULL))) __PYX_ERR(2, 413, __pyx_L1_error)
    __pyx_v_itemp = __pyx_t_6;
    /* "View.MemoryView":414
 *         else:
 *             itemp = self.get_item_pointer(indices)
 *             return self.convert_item_to_object(itemp)             # <<<<<<<<<<<<<<
 *
 *     def __setitem__(memoryview self, object index, object value):
 */
    __Pyx_XDECREF(__pyx_r);
    __pyx_t_3 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->convert_item_to_object(__pyx_v_self, __pyx_v_itemp); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 414, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __pyx_r = __pyx_t_3;
    __pyx_t_3 = 0;
    goto __pyx_L0;
  }
  /* "View.MemoryView":403
 *
 *
 *     def __getitem__(memoryview self, object index):             # <<<<<<<<<<<<<<
 *         if index is Ellipsis:
 *             return self
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("View.MemoryView.memoryview.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XDECREF(__pyx_v_have_slices);
  __Pyx_XDECREF(__pyx_v_indices);
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":416
 *             return self.convert_item_to_object(itemp)
 *
 *     def __setitem__(memoryview self, object index, object value):             # <<<<<<<<<<<<<<
 *         if self.view.readonly:
 *             raise TypeError("Cannot assign to read-only memoryview")
 */
/* Python wrapper */
/* mp_ass_subscript slot thunk: casts the arguments and delegates to the
 * generated __setitem__ implementation below; returns 0/-1. */
static int __pyx_memoryview___setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /*proto*/
static int __pyx_memoryview___setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) {
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0);
  __pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_6__setitem__(((struct __pyx_memoryview_obj *)__pyx_v_self), ((PyObject *)__pyx_v_index), ((PyObject *)__pyx_v_value));
  /* function exit code */
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* Cython-generated implementation of memoryview.__setitem__.
 *
 * Rejects writes when the underlying buffer is read-only (TypeError).
 * The index is normalised with _unellipsify; when it contains slices,
 * the value is either another slice (setitem_slice_assignment) or a
 * scalar broadcast over the target slice (setitem_slice_assign_scalar);
 * otherwise a single element is written via setitem_indexed.
 *
 * Returns 0 on success, -1 with an exception set.
 * NOTE: do not hand-edit — regenerated by Cython from View.MemoryView. */
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_6__setitem__(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) {
  PyObject *__pyx_v_have_slices = NULL;
  PyObject *__pyx_v_obj = NULL;
  int __pyx_r;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  __Pyx_RefNannySetupContext("__setitem__", 0);
  /* index is rebound below, so take an owned reference up front. */
  __Pyx_INCREF(__pyx_v_index);
  /* "View.MemoryView":417
 *
 *     def __setitem__(memoryview self, object index, object value):
 *         if self.view.readonly:             # <<<<<<<<<<<<<<
 *             raise TypeError("Cannot assign to read-only memoryview")
 *
 */
  __pyx_t_1 = (__pyx_v_self->view.readonly != 0);
  if (unlikely(__pyx_t_1)) {
    /* "View.MemoryView":418
 *     def __setitem__(memoryview self, object index, object value):
 *         if self.view.readonly:
 *             raise TypeError("Cannot assign to read-only memoryview")             # <<<<<<<<<<<<<<
 *
 *         have_slices, index = _unellipsify(index, self.view.ndim)
 */
    __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__16, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 418, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_Raise(__pyx_t_2, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __PYX_ERR(2, 418, __pyx_L1_error)
    /* "View.MemoryView":417
 *
 *     def __setitem__(memoryview self, object index, object value):
 *         if self.view.readonly:             # <<<<<<<<<<<<<<
 *             raise TypeError("Cannot assign to read-only memoryview")
 *
 */
  }
  /* "View.MemoryView":420
 *             raise TypeError("Cannot assign to read-only memoryview")
 *
 *         have_slices, index = _unellipsify(index, self.view.ndim)             # <<<<<<<<<<<<<<
 *
 *         if have_slices:
 */
  /* Tuple-unpack the 2-tuple returned by _unellipsify; the second item
   * replaces the local `index` binding. */
  __pyx_t_2 = _unellipsify(__pyx_v_index, __pyx_v_self->view.ndim); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 420, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  if (likely(__pyx_t_2 != Py_None)) {
    PyObject* sequence = __pyx_t_2;
    Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);
    if (unlikely(size != 2)) {
      if (size > 2) __Pyx_RaiseTooManyValuesError(2);
      else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);
      __PYX_ERR(2, 420, __pyx_L1_error)
    }
    #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
    __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0);
    __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1);
    __Pyx_INCREF(__pyx_t_3);
    __Pyx_INCREF(__pyx_t_4);
    #else
    __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 420, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 420, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_4);
    #endif
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  } else {
    __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(2, 420, __pyx_L1_error)
  }
  __pyx_v_have_slices = __pyx_t_3;
  __pyx_t_3 = 0;
  __Pyx_DECREF_SET(__pyx_v_index, __pyx_t_4);
  __pyx_t_4 = 0;
  /* "View.MemoryView":422
 *         have_slices, index = _unellipsify(index, self.view.ndim)
 *
 *         if have_slices:             # <<<<<<<<<<<<<<
 *             obj = self.is_slice(value)
 *             if obj:
 */
  __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_have_slices); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 422, __pyx_L1_error)
  if (__pyx_t_1) {
    /* "View.MemoryView":423
 *
 *         if have_slices:
 *             obj = self.is_slice(value)             # <<<<<<<<<<<<<<
 *             if obj:
 *                 self.setitem_slice_assignment(self[index], obj)
 */
    __pyx_t_2 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->is_slice(__pyx_v_self, __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 423, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __pyx_v_obj = __pyx_t_2;
    __pyx_t_2 = 0;
    /* "View.MemoryView":424
 *         if have_slices:
 *             obj = self.is_slice(value)
 *             if obj:             # <<<<<<<<<<<<<<
 *                 self.setitem_slice_assignment(self[index], obj)
 *             else:
 */
    __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_obj); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 424, __pyx_L1_error)
    if (__pyx_t_1) {
      /* "View.MemoryView":425
 *             obj = self.is_slice(value)
 *             if obj:
 *                 self.setitem_slice_assignment(self[index], obj)             # <<<<<<<<<<<<<<
 *             else:
 *                 self.setitem_slice_assign_scalar(self[index], value)
 */
      __pyx_t_2 = __Pyx_PyObject_GetItem(((PyObject *)__pyx_v_self), __pyx_v_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 425, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_2);
      __pyx_t_4 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->setitem_slice_assignment(__pyx_v_self, __pyx_t_2, __pyx_v_obj); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 425, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_4);
      __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
      /* "View.MemoryView":424
 *         if have_slices:
 *             obj = self.is_slice(value)
 *             if obj:             # <<<<<<<<<<<<<<
 *                 self.setitem_slice_assignment(self[index], obj)
 *             else:
 */
      goto __pyx_L5;
    }
    /* "View.MemoryView":427
 *                 self.setitem_slice_assignment(self[index], obj)
 *             else:
 *                 self.setitem_slice_assign_scalar(self[index], value)             # <<<<<<<<<<<<<<
 *         else:
 *             self.setitem_indexed(index, value)
 */
    /*else*/ {
      /* self[index] must itself be a memoryview (or None) before the cast. */
      __pyx_t_4 = __Pyx_PyObject_GetItem(((PyObject *)__pyx_v_self), __pyx_v_index); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 427, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_4);
      if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_memoryview_type))))) __PYX_ERR(2, 427, __pyx_L1_error)
      __pyx_t_2 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->setitem_slice_assign_scalar(__pyx_v_self, ((struct __pyx_memoryview_obj *)__pyx_t_4), __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 427, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_2);
      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
      __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    }
    __pyx_L5:;
    /* "View.MemoryView":422
 *         have_slices, index = _unellipsify(index, self.view.ndim)
 *
 *         if have_slices:             # <<<<<<<<<<<<<<
 *             obj = self.is_slice(value)
 *             if obj:
 */
    goto __pyx_L4;
  }
  /* "View.MemoryView":429
 *                 self.setitem_slice_assign_scalar(self[index], value)
 *         else:
 *             self.setitem_indexed(index, value)             # <<<<<<<<<<<<<<
 *
 *     cdef is_slice(self, obj):
 */
  /*else*/ {
    __pyx_t_2 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->setitem_indexed(__pyx_v_self, __pyx_v_index, __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 429, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  }
  __pyx_L4:;
  /* "View.MemoryView":416
 *             return self.convert_item_to_object(itemp)
 *
 *     def __setitem__(memoryview self, object index, object value):             # <<<<<<<<<<<<<<
 *         if self.view.readonly:
 *             raise TypeError("Cannot assign to read-only memoryview")
 */
  /* function exit code */
  __pyx_r = 0;
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_AddTraceback("View.MemoryView.memoryview.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = -1;
  __pyx_L0:;
  __Pyx_XDECREF(__pyx_v_have_slices);
  __Pyx_XDECREF(__pyx_v_obj);
  __Pyx_XDECREF(__pyx_v_index);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":431
* self.setitem_indexed(index, value)
*
* cdef is_slice(self, obj): # <<<<<<<<<<<<<<
* if not isinstance(obj, memoryview):
* try:
*/
/* NOTE(review): Cython-generated code (View.MemoryView utility). Do not edit by
 * hand in the normal workflow — regenerate from the .pyx instead. Comments below
 * are review annotations only; the code is byte-identical to the generated output.
 *
 * C implementation of `memoryview.is_slice(obj)` (original Cython source is
 * quoted in the banner comments). If `obj` is not already a memoryview, it tries
 * to wrap it in one using the current flags with PyBUF_WRITABLE cleared and
 * PyBUF_ANY_CONTIGUOUS added; a TypeError during that wrapping yields None.
 * Otherwise the (possibly wrapped) object is returned with a new reference.
 * Returns NULL with an exception set on any other error. */
static PyObject *__pyx_memoryview_is_slice(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_obj) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_t_9;
__Pyx_RefNannySetupContext("is_slice", 0);
/* obj is rebound below (DECREF_SET), so own a reference for the duration. */
__Pyx_INCREF(__pyx_v_obj);
/* "View.MemoryView":432
 *
 * cdef is_slice(self, obj):
 * if not isinstance(obj, memoryview): # <<<<<<<<<<<<<<
 * try:
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 */
__pyx_t_1 = __Pyx_TypeCheck(__pyx_v_obj, __pyx_memoryview_type);
__pyx_t_2 = ((!(__pyx_t_1 != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":433
 * cdef is_slice(self, obj):
 * if not isinstance(obj, memoryview):
 * try: # <<<<<<<<<<<<<<
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 * self.dtype_is_object)
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* Save the current exception state (t_3..t_5) so the except/else machinery
 * below can restore it on exit — this is the expanded try/except prologue. */
__Pyx_ExceptionSave(&__pyx_t_3, &__pyx_t_4, &__pyx_t_5);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_5);
/*try:*/ {
/* "View.MemoryView":434
 * if not isinstance(obj, memoryview):
 * try:
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS, # <<<<<<<<<<<<<<
 * self.dtype_is_object)
 * except TypeError:
 */
__pyx_t_6 = __Pyx_PyInt_From_int(((__pyx_v_self->flags & (~PyBUF_WRITABLE)) | PyBUF_ANY_CONTIGUOUS)); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 434, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_6);
/* "View.MemoryView":435
 * try:
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 * self.dtype_is_object) # <<<<<<<<<<<<<<
 * except TypeError:
 * return None
 */
__pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_self->dtype_is_object); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 435, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_7);
/* "View.MemoryView":434
 * if not isinstance(obj, memoryview):
 * try:
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS, # <<<<<<<<<<<<<<
 * self.dtype_is_object)
 * except TypeError:
 */
/* Build the 3-tuple (obj, flags, dtype_is_object) and call the memoryview type. */
__pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 434, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_8);
__Pyx_INCREF(__pyx_v_obj);
__Pyx_GIVEREF(__pyx_v_obj);
PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_obj);
__Pyx_GIVEREF(__pyx_t_6);
PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_6);
__Pyx_GIVEREF(__pyx_t_7);
PyTuple_SET_ITEM(__pyx_t_8, 2, __pyx_t_7);
__pyx_t_6 = 0;
__pyx_t_7 = 0;
__pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)__pyx_memoryview_type), __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 434, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__Pyx_DECREF_SET(__pyx_v_obj, __pyx_t_7);
__pyx_t_7 = 0;
/* "View.MemoryView":433
 * cdef is_slice(self, obj):
 * if not isinstance(obj, memoryview):
 * try: # <<<<<<<<<<<<<<
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 * self.dtype_is_object)
 */
}
/* try body succeeded: drop the saved exception state and skip the handlers. */
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
goto __pyx_L9_try_end;
__pyx_L4_error:;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
/* "View.MemoryView":436
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 * self.dtype_is_object)
 * except TypeError: # <<<<<<<<<<<<<<
 * return None
 *
 */
__pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError);
if (__pyx_t_9) {
__Pyx_AddTraceback("View.MemoryView.memoryview.is_slice", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_6) < 0) __PYX_ERR(2, 436, __pyx_L6_except_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_GOTREF(__pyx_t_8);
__Pyx_GOTREF(__pyx_t_6);
/* "View.MemoryView":437
 * self.dtype_is_object)
 * except TypeError:
 * return None # <<<<<<<<<<<<<<
 *
 * return obj
 */
/* TypeError means obj is not buffer-compatible: swallow it and return None. */
__Pyx_XDECREF(__pyx_r);
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
goto __pyx_L7_except_return;
}
goto __pyx_L6_except_error;
__pyx_L6_except_error:;
/* "View.MemoryView":433
 * cdef is_slice(self, obj):
 * if not isinstance(obj, memoryview):
 * try: # <<<<<<<<<<<<<<
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 * self.dtype_is_object)
 */
/* Unhandled exception: restore the outer exception state before propagating. */
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_XGIVEREF(__pyx_t_5);
__Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5);
goto __pyx_L1_error;
__pyx_L7_except_return:;
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_XGIVEREF(__pyx_t_5);
__Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5);
goto __pyx_L0;
__pyx_L9_try_end:;
}
/* "View.MemoryView":432
 *
 * cdef is_slice(self, obj):
 * if not isinstance(obj, memoryview): # <<<<<<<<<<<<<<
 * try:
 * obj = memoryview(obj, self.flags & ~PyBUF_WRITABLE | PyBUF_ANY_CONTIGUOUS,
 */
}
/* "View.MemoryView":439
 * return None
 *
 * return obj # <<<<<<<<<<<<<<
 *
 * cdef setitem_slice_assignment(self, dst, src):
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_obj);
__pyx_r = __pyx_v_obj;
goto __pyx_L0;
/* "View.MemoryView":431
 * self.setitem_indexed(index, value)
 *
 * cdef is_slice(self, obj): # <<<<<<<<<<<<<<
 * if not isinstance(obj, memoryview):
 * try:
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("View.MemoryView.memoryview.is_slice", __pyx_clineno, __pyx_lineno, __pyx_filename);
/* NULL result signals the exception to the caller (cdef convention). */
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_obj);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":441
* return obj
*
* cdef setitem_slice_assignment(self, dst, src): # <<<<<<<<<<<<<<
* cdef __Pyx_memviewslice dst_slice
* cdef __Pyx_memviewslice src_slice
*/
/* NOTE(review): Cython-generated code — regenerate from the .pyx rather than
 * editing here. Review annotations only; code is unchanged.
 *
 * C implementation of `memoryview.setitem_slice_assignment(dst, src)`: extracts
 * a __Pyx_memviewslice from both `src` and `dst` (both must be memoryview or
 * None — enforced by the TypeTest calls) and copies src's contents into dst via
 * memoryview_copy_contents, passing each side's ndim and dtype_is_object.
 * Returns None on success, NULL with an exception set on failure. */
static PyObject *__pyx_memoryview_setitem_slice_assignment(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_dst, PyObject *__pyx_v_src) {
__Pyx_memviewslice __pyx_v_dst_slice;
__Pyx_memviewslice __pyx_v_src_slice;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice *__pyx_t_1;
__Pyx_memviewslice *__pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
int __pyx_t_5;
int __pyx_t_6;
__Pyx_RefNannySetupContext("setitem_slice_assignment", 0);
/* "View.MemoryView":445
 * cdef __Pyx_memviewslice src_slice
 *
 * memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0], # <<<<<<<<<<<<<<
 * get_slice_from_memview(dst, &dst_slice)[0],
 * src.ndim, dst.ndim, self.dtype_is_object)
 */
if (!(likely(((__pyx_v_src) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_src, __pyx_memoryview_type))))) __PYX_ERR(2, 445, __pyx_L1_error)
__pyx_t_1 = __pyx_memoryview_get_slice_from_memoryview(((struct __pyx_memoryview_obj *)__pyx_v_src), (&__pyx_v_src_slice)); if (unlikely(__pyx_t_1 == ((__Pyx_memviewslice *)NULL))) __PYX_ERR(2, 445, __pyx_L1_error)
/* "View.MemoryView":446
 *
 * memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0],
 * get_slice_from_memview(dst, &dst_slice)[0], # <<<<<<<<<<<<<<
 * src.ndim, dst.ndim, self.dtype_is_object)
 *
 */
if (!(likely(((__pyx_v_dst) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_dst, __pyx_memoryview_type))))) __PYX_ERR(2, 446, __pyx_L1_error)
__pyx_t_2 = __pyx_memoryview_get_slice_from_memoryview(((struct __pyx_memoryview_obj *)__pyx_v_dst), (&__pyx_v_dst_slice)); if (unlikely(__pyx_t_2 == ((__Pyx_memviewslice *)NULL))) __PYX_ERR(2, 446, __pyx_L1_error)
/* "View.MemoryView":447
 * memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0],
 * get_slice_from_memview(dst, &dst_slice)[0],
 * src.ndim, dst.ndim, self.dtype_is_object) # <<<<<<<<<<<<<<
 *
 * cdef setitem_slice_assign_scalar(self, memoryview dst, value):
 */
/* ndim is read via Python attribute access (GetAttrStr) and coerced to int. */
__pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_src, __pyx_n_s_ndim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 447, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 447, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_dst, __pyx_n_s_ndim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 447, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 447, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
/* "View.MemoryView":445
 * cdef __Pyx_memviewslice src_slice
 *
 * memoryview_copy_contents(get_slice_from_memview(src, &src_slice)[0], # <<<<<<<<<<<<<<
 * get_slice_from_memview(dst, &dst_slice)[0],
 * src.ndim, dst.ndim, self.dtype_is_object)
 */
__pyx_t_6 = __pyx_memoryview_copy_contents((__pyx_t_1[0]), (__pyx_t_2[0]), __pyx_t_4, __pyx_t_5, __pyx_v_self->dtype_is_object); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(2, 445, __pyx_L1_error)
/* "View.MemoryView":441
 * return obj
 *
 * cdef setitem_slice_assignment(self, dst, src): # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice dst_slice
 * cdef __Pyx_memviewslice src_slice
 */
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.setitem_slice_assignment", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":449
* src.ndim, dst.ndim, self.dtype_is_object)
*
* cdef setitem_slice_assign_scalar(self, memoryview dst, value): # <<<<<<<<<<<<<<
* cdef int array[128]
* cdef void *tmp = NULL
*/
/* NOTE(review): Cython-generated code — regenerate from the .pyx rather than
 * editing here. Review annotations only; code is unchanged.
 *
 * C implementation of `memoryview.setitem_slice_assign_scalar(dst, value)`:
 * packs a single scalar `value` into one item-sized buffer (a 128-int stack
 * array, or a PyMem_Malloc'd buffer when itemsize exceeds it), then broadcasts
 * that item over every element of the destination slice via
 * slice_assign_scalar. The pack/broadcast runs inside an expanded try/finally
 * that guarantees PyMem_Free(tmp) on both the success and error paths.
 * Returns None on success, NULL with an exception set on failure. */
static PyObject *__pyx_memoryview_setitem_slice_assign_scalar(struct __pyx_memoryview_obj *__pyx_v_self, struct __pyx_memoryview_obj *__pyx_v_dst, PyObject *__pyx_v_value) {
int __pyx_v_array[0x80];
void *__pyx_v_tmp;
void *__pyx_v_item;
__Pyx_memviewslice *__pyx_v_dst_slice;
__Pyx_memviewslice __pyx_v_tmp_slice;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice *__pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
int __pyx_t_5;
char const *__pyx_t_6;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
PyObject *__pyx_t_9 = NULL;
PyObject *__pyx_t_10 = NULL;
PyObject *__pyx_t_11 = NULL;
PyObject *__pyx_t_12 = NULL;
__Pyx_RefNannySetupContext("setitem_slice_assign_scalar", 0);
/* "View.MemoryView":451
 * cdef setitem_slice_assign_scalar(self, memoryview dst, value):
 * cdef int array[128]
 * cdef void *tmp = NULL # <<<<<<<<<<<<<<
 * cdef void *item
 *
 */
/* tmp stays NULL when the stack array suffices; PyMem_Free(NULL) is a no-op. */
__pyx_v_tmp = NULL;
/* "View.MemoryView":456
 * cdef __Pyx_memviewslice *dst_slice
 * cdef __Pyx_memviewslice tmp_slice
 * dst_slice = get_slice_from_memview(dst, &tmp_slice) # <<<<<<<<<<<<<<
 *
 * if <size_t>self.view.itemsize > sizeof(array):
 */
__pyx_t_1 = __pyx_memoryview_get_slice_from_memoryview(__pyx_v_dst, (&__pyx_v_tmp_slice)); if (unlikely(__pyx_t_1 == ((__Pyx_memviewslice *)NULL))) __PYX_ERR(2, 456, __pyx_L1_error)
__pyx_v_dst_slice = __pyx_t_1;
/* "View.MemoryView":458
 * dst_slice = get_slice_from_memview(dst, &tmp_slice)
 *
 * if <size_t>self.view.itemsize > sizeof(array): # <<<<<<<<<<<<<<
 * tmp = PyMem_Malloc(self.view.itemsize)
 * if tmp == NULL:
 */
__pyx_t_2 = ((((size_t)__pyx_v_self->view.itemsize) > (sizeof(__pyx_v_array))) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":459
 *
 * if <size_t>self.view.itemsize > sizeof(array):
 * tmp = PyMem_Malloc(self.view.itemsize) # <<<<<<<<<<<<<<
 * if tmp == NULL:
 * raise MemoryError
 */
__pyx_v_tmp = PyMem_Malloc(__pyx_v_self->view.itemsize);
/* "View.MemoryView":460
 * if <size_t>self.view.itemsize > sizeof(array):
 * tmp = PyMem_Malloc(self.view.itemsize)
 * if tmp == NULL: # <<<<<<<<<<<<<<
 * raise MemoryError
 * item = tmp
 */
__pyx_t_2 = ((__pyx_v_tmp == NULL) != 0);
if (unlikely(__pyx_t_2)) {
/* "View.MemoryView":461
 * tmp = PyMem_Malloc(self.view.itemsize)
 * if tmp == NULL:
 * raise MemoryError # <<<<<<<<<<<<<<
 * item = tmp
 * else:
 */
PyErr_NoMemory(); __PYX_ERR(2, 461, __pyx_L1_error)
/* "View.MemoryView":460
 * if <size_t>self.view.itemsize > sizeof(array):
 * tmp = PyMem_Malloc(self.view.itemsize)
 * if tmp == NULL: # <<<<<<<<<<<<<<
 * raise MemoryError
 * item = tmp
 */
}
/* "View.MemoryView":462
 * if tmp == NULL:
 * raise MemoryError
 * item = tmp # <<<<<<<<<<<<<<
 * else:
 * item = <void *> array
 */
__pyx_v_item = __pyx_v_tmp;
/* "View.MemoryView":458
 * dst_slice = get_slice_from_memview(dst, &tmp_slice)
 *
 * if <size_t>self.view.itemsize > sizeof(array): # <<<<<<<<<<<<<<
 * tmp = PyMem_Malloc(self.view.itemsize)
 * if tmp == NULL:
 */
goto __pyx_L3;
}
/* "View.MemoryView":464
 * item = tmp
 * else:
 * item = <void *> array # <<<<<<<<<<<<<<
 *
 * try:
 */
/*else*/ {
__pyx_v_item = ((void *)__pyx_v_array);
}
__pyx_L3:;
/* "View.MemoryView":466
 * item = <void *> array
 *
 * try: # <<<<<<<<<<<<<<
 * if self.dtype_is_object:
 * (<PyObject **> item)[0] = <PyObject *> value
 */
/*try:*/ {
/* "View.MemoryView":467
 *
 * try:
 * if self.dtype_is_object: # <<<<<<<<<<<<<<
 * (<PyObject **> item)[0] = <PyObject *> value
 * else:
 */
__pyx_t_2 = (__pyx_v_self->dtype_is_object != 0);
if (__pyx_t_2) {
/* "View.MemoryView":468
 * try:
 * if self.dtype_is_object:
 * (<PyObject **> item)[0] = <PyObject *> value # <<<<<<<<<<<<<<
 * else:
 * self.assign_item_from_object(<char *> item, value)
 */
/* Object dtype: store the raw pointer (borrowed here); refcounting is the
 * broadcast's concern via the dtype_is_object flag passed below. */
(((PyObject **)__pyx_v_item)[0]) = ((PyObject *)__pyx_v_value);
/* "View.MemoryView":467
 *
 * try:
 * if self.dtype_is_object: # <<<<<<<<<<<<<<
 * (<PyObject **> item)[0] = <PyObject *> value
 * else:
 */
goto __pyx_L8;
}
/* "View.MemoryView":470
 * (<PyObject **> item)[0] = <PyObject *> value
 * else:
 * self.assign_item_from_object(<char *> item, value) # <<<<<<<<<<<<<<
 *
 *
 */
/*else*/ {
__pyx_t_3 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->assign_item_from_object(__pyx_v_self, ((char *)__pyx_v_item), __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 470, __pyx_L6_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
}
__pyx_L8:;
/* "View.MemoryView":474
 *
 *
 * if self.view.suboffsets != NULL: # <<<<<<<<<<<<<<
 * assert_direct_dimensions(self.view.suboffsets, self.view.ndim)
 * slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize,
 */
__pyx_t_2 = ((__pyx_v_self->view.suboffsets != NULL) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":475
 *
 * if self.view.suboffsets != NULL:
 * assert_direct_dimensions(self.view.suboffsets, self.view.ndim) # <<<<<<<<<<<<<<
 * slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize,
 * item, self.dtype_is_object)
 */
__pyx_t_3 = assert_direct_dimensions(__pyx_v_self->view.suboffsets, __pyx_v_self->view.ndim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 475, __pyx_L6_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
/* "View.MemoryView":474
 *
 *
 * if self.view.suboffsets != NULL: # <<<<<<<<<<<<<<
 * assert_direct_dimensions(self.view.suboffsets, self.view.ndim)
 * slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize,
 */
}
/* "View.MemoryView":476
 * if self.view.suboffsets != NULL:
 * assert_direct_dimensions(self.view.suboffsets, self.view.ndim)
 * slice_assign_scalar(dst_slice, dst.view.ndim, self.view.itemsize, # <<<<<<<<<<<<<<
 * item, self.dtype_is_object)
 * finally:
 */
__pyx_memoryview_slice_assign_scalar(__pyx_v_dst_slice, __pyx_v_dst->view.ndim, __pyx_v_self->view.itemsize, __pyx_v_item, __pyx_v_self->dtype_is_object);
}
/* "View.MemoryView":479
 * item, self.dtype_is_object)
 * finally:
 * PyMem_Free(tmp) # <<<<<<<<<<<<<<
 *
 * cdef setitem_indexed(self, index, value):
 */
/*finally:*/ {
/*normal exit:*/{
PyMem_Free(__pyx_v_tmp);
goto __pyx_L7;
}
__pyx_L6_error:;
/*exception exit:*/{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0;
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
/* Stash the in-flight exception (and, on py3, the active exception state),
 * run the finally body, then restore everything before re-raising. */
if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12);
if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9) < 0)) __Pyx_ErrFetch(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9);
__Pyx_XGOTREF(__pyx_t_7);
__Pyx_XGOTREF(__pyx_t_8);
__Pyx_XGOTREF(__pyx_t_9);
__Pyx_XGOTREF(__pyx_t_10);
__Pyx_XGOTREF(__pyx_t_11);
__Pyx_XGOTREF(__pyx_t_12);
/* Also preserve the error-location globals across the finally body. */
__pyx_t_4 = __pyx_lineno; __pyx_t_5 = __pyx_clineno; __pyx_t_6 = __pyx_filename;
{
PyMem_Free(__pyx_v_tmp);
}
if (PY_MAJOR_VERSION >= 3) {
__Pyx_XGIVEREF(__pyx_t_10);
__Pyx_XGIVEREF(__pyx_t_11);
__Pyx_XGIVEREF(__pyx_t_12);
__Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12);
}
__Pyx_XGIVEREF(__pyx_t_7);
__Pyx_XGIVEREF(__pyx_t_8);
__Pyx_XGIVEREF(__pyx_t_9);
__Pyx_ErrRestore(__pyx_t_7, __pyx_t_8, __pyx_t_9);
__pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0;
__pyx_lineno = __pyx_t_4; __pyx_clineno = __pyx_t_5; __pyx_filename = __pyx_t_6;
goto __pyx_L1_error;
}
__pyx_L7:;
}
/* "View.MemoryView":449
 * src.ndim, dst.ndim, self.dtype_is_object)
 *
 * cdef setitem_slice_assign_scalar(self, memoryview dst, value): # <<<<<<<<<<<<<<
 * cdef int array[128]
 * cdef void *tmp = NULL
 */
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.setitem_slice_assign_scalar", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":481
* PyMem_Free(tmp)
*
* cdef setitem_indexed(self, index, value): # <<<<<<<<<<<<<<
* cdef char *itemp = self.get_item_pointer(index)
* self.assign_item_from_object(itemp, value)
*/
/* NOTE(review): Cython-generated code — regenerate from the .pyx rather than
 * editing here. Review annotations only; code is unchanged.
 *
 * C implementation of `memoryview.setitem_indexed(index, value)`: resolves
 * `index` to an element pointer via the vtable's get_item_pointer and hands
 * `value` to assign_item_from_object to write it in place.
 * Returns None on success, NULL with an exception set on failure. */
static PyObject *__pyx_memoryview_setitem_indexed(struct __pyx_memoryview_obj *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) {
char *__pyx_v_itemp;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
char *__pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("setitem_indexed", 0);
/* "View.MemoryView":482
 *
 * cdef setitem_indexed(self, index, value):
 * cdef char *itemp = self.get_item_pointer(index) # <<<<<<<<<<<<<<
 * self.assign_item_from_object(itemp, value)
 *
 */
/* Both calls dispatch through the memoryview vtable so subclasses'
 * overrides (e.g. typed memoryviews) take effect. */
__pyx_t_1 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->get_item_pointer(__pyx_v_self, __pyx_v_index); if (unlikely(__pyx_t_1 == ((char *)NULL))) __PYX_ERR(2, 482, __pyx_L1_error)
__pyx_v_itemp = __pyx_t_1;
/* "View.MemoryView":483
 * cdef setitem_indexed(self, index, value):
 * cdef char *itemp = self.get_item_pointer(index)
 * self.assign_item_from_object(itemp, value) # <<<<<<<<<<<<<<
 *
 * cdef convert_item_to_object(self, char *itemp):
 */
__pyx_t_2 = ((struct __pyx_vtabstruct_memoryview *)__pyx_v_self->__pyx_vtab)->assign_item_from_object(__pyx_v_self, __pyx_v_itemp, __pyx_v_value); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 483, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/* "View.MemoryView":481
 * PyMem_Free(tmp)
 *
 * cdef setitem_indexed(self, index, value): # <<<<<<<<<<<<<<
 * cdef char *itemp = self.get_item_pointer(index)
 * self.assign_item_from_object(itemp, value)
 */
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.setitem_indexed", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":485
* self.assign_item_from_object(itemp, value)
*
* cdef convert_item_to_object(self, char *itemp): # <<<<<<<<<<<<<<
* """Only used if instantiated manually by the user, or if Cython doesn't
* know how to convert the type"""
*/
/* NOTE(review): Cython-generated code — regenerate from the .pyx rather than
 * editing here. Review annotations only; code is unchanged.
 *
 * C implementation of `memoryview.convert_item_to_object(itemp)` — the slow
 * fallback used when Cython cannot convert the element type itself. It copies
 * itemsize bytes at itemp into a bytes object and decodes them with
 * struct.unpack(self.view.format, ...). A struct.error is converted into
 * ValueError("Unable to convert item to object"); otherwise, a single-
 * character format returns the lone unpacked value, a multi-field format
 * returns the whole tuple. Returns NULL with an exception set on error. */
static PyObject *__pyx_memoryview_convert_item_to_object(struct __pyx_memoryview_obj *__pyx_v_self, char *__pyx_v_itemp) {
PyObject *__pyx_v_struct = NULL;
PyObject *__pyx_v_bytesitem = 0;
PyObject *__pyx_v_result = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
int __pyx_t_8;
PyObject *__pyx_t_9 = NULL;
size_t __pyx_t_10;
int __pyx_t_11;
__Pyx_RefNannySetupContext("convert_item_to_object", 0);
/* "View.MemoryView":488
 * """Only used if instantiated manually by the user, or if Cython doesn't
 * know how to convert the type"""
 * import struct # <<<<<<<<<<<<<<
 * cdef bytes bytesitem
 *
 */
/* Function-local import, matching the original Cython source. */
__pyx_t_1 = __Pyx_Import(__pyx_n_s_struct, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 488, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_struct = __pyx_t_1;
__pyx_t_1 = 0;
/* "View.MemoryView":491
 * cdef bytes bytesitem
 *
 * bytesitem = itemp[:self.view.itemsize] # <<<<<<<<<<<<<<
 * try:
 * result = struct.unpack(self.view.format, bytesitem)
 */
/* char* slice -> bytes copy of exactly one element. */
__pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_itemp + 0, __pyx_v_self->view.itemsize - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 491, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_bytesitem = ((PyObject*)__pyx_t_1);
__pyx_t_1 = 0;
/* "View.MemoryView":492
 *
 * bytesitem = itemp[:self.view.itemsize]
 * try: # <<<<<<<<<<<<<<
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error:
 */
{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
/* Expanded try/except/else prologue: save outer exception state in t_2..t_4. */
__Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4);
__Pyx_XGOTREF(__pyx_t_2);
__Pyx_XGOTREF(__pyx_t_3);
__Pyx_XGOTREF(__pyx_t_4);
/*try:*/ {
/* "View.MemoryView":493
 * bytesitem = itemp[:self.view.itemsize]
 * try:
 * result = struct.unpack(self.view.format, bytesitem) # <<<<<<<<<<<<<<
 * except struct.error:
 * raise ValueError("Unable to convert item to object")
 */
__pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_struct, __pyx_n_s_unpack); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_t_6 = __Pyx_PyBytes_FromString(__pyx_v_self->view.format); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_6);
/* Three alternative call paths for struct.unpack(format, bytesitem):
 * fast PyFunction call, fast C-function call, or the generic tuple call. */
__pyx_t_7 = NULL;
__pyx_t_8 = 0;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {
__pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5);
if (likely(__pyx_t_7)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);
__Pyx_INCREF(__pyx_t_7);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_5, function);
__pyx_t_8 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_5)) {
PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_6, __pyx_v_bytesitem};
__pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) {
PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_6, __pyx_v_bytesitem};
__pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
} else
#endif
{
__pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_9);
if (__pyx_t_7) {
__Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL;
}
__Pyx_GIVEREF(__pyx_t_6);
PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_6);
__Pyx_INCREF(__pyx_v_bytesitem);
__Pyx_GIVEREF(__pyx_v_bytesitem);
PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_v_bytesitem);
__pyx_t_6 = 0;
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 493, __pyx_L3_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
}
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_v_result = __pyx_t_1;
__pyx_t_1 = 0;
/* "View.MemoryView":492
 *
 * bytesitem = itemp[:self.view.itemsize]
 * try: # <<<<<<<<<<<<<<
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error:
 */
}
/* "View.MemoryView":497
 * raise ValueError("Unable to convert item to object")
 * else:
 * if len(self.view.format) == 1: # <<<<<<<<<<<<<<
 * return result[0]
 * return result
 */
/*else:*/ {
/* len() on the C char* format string compiles to strlen. */
__pyx_t_10 = strlen(__pyx_v_self->view.format);
__pyx_t_11 = ((__pyx_t_10 == 1) != 0);
if (__pyx_t_11) {
/* "View.MemoryView":498
 * else:
 * if len(self.view.format) == 1:
 * return result[0] # <<<<<<<<<<<<<<
 * return result
 *
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_GetItemInt(__pyx_v_result, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 498, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L6_except_return;
/* "View.MemoryView":497
 * raise ValueError("Unable to convert item to object")
 * else:
 * if len(self.view.format) == 1: # <<<<<<<<<<<<<<
 * return result[0]
 * return result
 */
}
/* "View.MemoryView":499
 * if len(self.view.format) == 1:
 * return result[0]
 * return result # <<<<<<<<<<<<<<
 *
 * cdef assign_item_from_object(self, char *itemp, object value):
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_result);
__pyx_r = __pyx_v_result;
goto __pyx_L6_except_return;
}
__pyx_L3_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;
/* "View.MemoryView":494
 * try:
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error: # <<<<<<<<<<<<<<
 * raise ValueError("Unable to convert item to object")
 * else:
 */
/* struct.error is not a builtin: fetch it from the module at match time,
 * with ErrFetch/ErrRestore bracketing so the pending exception survives. */
__Pyx_ErrFetch(&__pyx_t_1, &__pyx_t_5, &__pyx_t_9);
__pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_struct, __pyx_n_s_error); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 494, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_6);
__pyx_t_8 = __Pyx_PyErr_GivenExceptionMatches(__pyx_t_1, __pyx_t_6);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_ErrRestore(__pyx_t_1, __pyx_t_5, __pyx_t_9);
__pyx_t_1 = 0; __pyx_t_5 = 0; __pyx_t_9 = 0;
if (__pyx_t_8) {
__Pyx_AddTraceback("View.MemoryView.memoryview.convert_item_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_5, &__pyx_t_1) < 0) __PYX_ERR(2, 494, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_9);
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GOTREF(__pyx_t_1);
/* "View.MemoryView":495
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error:
 * raise ValueError("Unable to convert item to object") # <<<<<<<<<<<<<<
 * else:
 * if len(self.view.format) == 1:
 */
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__17, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 495, __pyx_L5_except_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_Raise(__pyx_t_6, 0, 0, 0);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__PYX_ERR(2, 495, __pyx_L5_except_error)
}
goto __pyx_L5_except_error;
__pyx_L5_except_error:;
/* "View.MemoryView":492
 *
 * bytesitem = itemp[:self.view.itemsize]
 * try: # <<<<<<<<<<<<<<
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error:
 */
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L1_error;
__pyx_L6_except_return:;
__Pyx_XGIVEREF(__pyx_t_2);
__Pyx_XGIVEREF(__pyx_t_3);
__Pyx_XGIVEREF(__pyx_t_4);
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4);
goto __pyx_L0;
}
/* "View.MemoryView":485
 * self.assign_item_from_object(itemp, value)
 *
 * cdef convert_item_to_object(self, char *itemp): # <<<<<<<<<<<<<<
 * """Only used if instantiated manually by the user, or if Cython doesn't
 * know how to convert the type"""
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_9);
__Pyx_AddTraceback("View.MemoryView.memoryview.convert_item_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_struct);
__Pyx_XDECREF(__pyx_v_bytesitem);
__Pyx_XDECREF(__pyx_v_result);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":501
* return result
*
* cdef assign_item_from_object(self, char *itemp, object value): # <<<<<<<<<<<<<<
* """Only used if instantiated manually by the user, or if Cython doesn't
* know how to convert the type"""
*/
static PyObject *__pyx_memoryview_assign_item_from_object(struct __pyx_memoryview_obj *__pyx_v_self, char *__pyx_v_itemp, PyObject *__pyx_v_value) {
PyObject *__pyx_v_struct = NULL;
char __pyx_v_c;
PyObject *__pyx_v_bytesvalue = 0;
Py_ssize_t __pyx_v_i;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_t_3;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
int __pyx_t_7;
PyObject *__pyx_t_8 = NULL;
Py_ssize_t __pyx_t_9;
PyObject *__pyx_t_10 = NULL;
char *__pyx_t_11;
char *__pyx_t_12;
char *__pyx_t_13;
char *__pyx_t_14;
__Pyx_RefNannySetupContext("assign_item_from_object", 0);
/* "View.MemoryView":504
* """Only used if instantiated manually by the user, or if Cython doesn't
* know how to convert the type"""
* import struct # <<<<<<<<<<<<<<
* cdef char c
* cdef bytes bytesvalue
*/
__pyx_t_1 = __Pyx_Import(__pyx_n_s_struct, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 504, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_v_struct = __pyx_t_1;
__pyx_t_1 = 0;
/* "View.MemoryView":509
* cdef Py_ssize_t i
*
* if isinstance(value, tuple): # <<<<<<<<<<<<<<
* bytesvalue = struct.pack(self.view.format, *value)
* else:
*/
__pyx_t_2 = PyTuple_Check(__pyx_v_value);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* "View.MemoryView":510
*
* if isinstance(value, tuple):
* bytesvalue = struct.pack(self.view.format, *value) # <<<<<<<<<<<<<<
* else:
* bytesvalue = struct.pack(self.view.format, value)
*/
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_struct, __pyx_n_s_pack); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_4 = __Pyx_PyBytes_FromString(__pyx_v_self->view.format); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4);
__pyx_t_4 = 0;
__pyx_t_4 = __Pyx_PySequence_Tuple(__pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_6 = PyNumber_Add(__pyx_t_5, __pyx_t_4); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 510, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
if (!(likely(PyBytes_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytes", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(2, 510, __pyx_L1_error)
__pyx_v_bytesvalue = ((PyObject*)__pyx_t_4);
__pyx_t_4 = 0;
/* "View.MemoryView":509
* cdef Py_ssize_t i
*
* if isinstance(value, tuple): # <<<<<<<<<<<<<<
* bytesvalue = struct.pack(self.view.format, *value)
* else:
*/
goto __pyx_L3;
}
/* "View.MemoryView":512
* bytesvalue = struct.pack(self.view.format, *value)
* else:
* bytesvalue = struct.pack(self.view.format, value) # <<<<<<<<<<<<<<
*
* for i, c in enumerate(bytesvalue):
*/
/*else*/ {
__pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_struct, __pyx_n_s_pack); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__pyx_t_1 = __Pyx_PyBytes_FromString(__pyx_v_self->view.format); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_5 = NULL;
__pyx_t_7 = 0;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_6);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);
__Pyx_INCREF(__pyx_t_5);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_6, function);
__pyx_t_7 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_6)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_1, __pyx_v_value};
__pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) {
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_1, __pyx_v_value};
__pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
} else
#endif
{
__pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_8);
if (__pyx_t_5) {
__Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_5); __pyx_t_5 = NULL;
}
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_t_1);
__Pyx_INCREF(__pyx_v_value);
__Pyx_GIVEREF(__pyx_v_value);
PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_v_value);
__pyx_t_1 = 0;
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 512, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
}
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
if (!(likely(PyBytes_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytes", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(2, 512, __pyx_L1_error)
__pyx_v_bytesvalue = ((PyObject*)__pyx_t_4);
__pyx_t_4 = 0;
}
__pyx_L3:;
/* "View.MemoryView":514
* bytesvalue = struct.pack(self.view.format, value)
*
* for i, c in enumerate(bytesvalue): # <<<<<<<<<<<<<<
* itemp[i] = c
*
*/
__pyx_t_9 = 0;
if (unlikely(__pyx_v_bytesvalue == Py_None)) {
PyErr_SetString(PyExc_TypeError, "'NoneType' is not iterable");
__PYX_ERR(2, 514, __pyx_L1_error)
}
__Pyx_INCREF(__pyx_v_bytesvalue);
__pyx_t_10 = __pyx_v_bytesvalue;
__pyx_t_12 = PyBytes_AS_STRING(__pyx_t_10);
__pyx_t_13 = (__pyx_t_12 + PyBytes_GET_SIZE(__pyx_t_10));
for (__pyx_t_14 = __pyx_t_12; __pyx_t_14 < __pyx_t_13; __pyx_t_14++) {
__pyx_t_11 = __pyx_t_14;
__pyx_v_c = (__pyx_t_11[0]);
/* "View.MemoryView":515
*
* for i, c in enumerate(bytesvalue):
* itemp[i] = c # <<<<<<<<<<<<<<
*
* @cname('getbuffer')
*/
__pyx_v_i = __pyx_t_9;
/* "View.MemoryView":514
* bytesvalue = struct.pack(self.view.format, value)
*
* for i, c in enumerate(bytesvalue): # <<<<<<<<<<<<<<
* itemp[i] = c
*
*/
__pyx_t_9 = (__pyx_t_9 + 1);
/* "View.MemoryView":515
*
* for i, c in enumerate(bytesvalue):
* itemp[i] = c # <<<<<<<<<<<<<<
*
* @cname('getbuffer')
*/
(__pyx_v_itemp[__pyx_v_i]) = __pyx_v_c;
}
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;
/* "View.MemoryView":501
* return result
*
* cdef assign_item_from_object(self, char *itemp, object value): # <<<<<<<<<<<<<<
* """Only used if instantiated manually by the user, or if Cython doesn't
* know how to convert the type"""
*/
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_XDECREF(__pyx_t_10);
__Pyx_AddTraceback("View.MemoryView.memoryview.assign_item_from_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_struct);
__Pyx_XDECREF(__pyx_v_bytesvalue);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":518
*
* @cname('getbuffer')
* def __getbuffer__(self, Py_buffer *info, int flags): # <<<<<<<<<<<<<<
* if flags & PyBUF_WRITABLE and self.view.readonly:
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*/
/* Python wrapper */
static CYTHON_UNUSED int __pyx_memoryview_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/
static CYTHON_UNUSED int __pyx_memoryview_getbuffer(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_8__getbuffer__(((struct __pyx_memoryview_obj *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static int __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_8__getbuffer__(struct __pyx_memoryview_obj *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {
int __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
Py_ssize_t *__pyx_t_4;
char *__pyx_t_5;
void *__pyx_t_6;
int __pyx_t_7;
Py_ssize_t __pyx_t_8;
if (__pyx_v_info == NULL) {
PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete");
return -1;
}
__Pyx_RefNannySetupContext("__getbuffer__", 0);
__pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);
__Pyx_GIVEREF(__pyx_v_info->obj);
/* "View.MemoryView":519
* @cname('getbuffer')
* def __getbuffer__(self, Py_buffer *info, int flags):
* if flags & PyBUF_WRITABLE and self.view.readonly: # <<<<<<<<<<<<<<
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*
*/
__pyx_t_2 = ((__pyx_v_flags & PyBUF_WRITABLE) != 0);
if (__pyx_t_2) {
} else {
__pyx_t_1 = __pyx_t_2;
goto __pyx_L4_bool_binop_done;
}
__pyx_t_2 = (__pyx_v_self->view.readonly != 0);
__pyx_t_1 = __pyx_t_2;
__pyx_L4_bool_binop_done:;
if (unlikely(__pyx_t_1)) {
/* "View.MemoryView":520
* def __getbuffer__(self, Py_buffer *info, int flags):
* if flags & PyBUF_WRITABLE and self.view.readonly:
* raise ValueError("Cannot create writable memory view from read-only memoryview") # <<<<<<<<<<<<<<
*
* if flags & PyBUF_ND:
*/
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__18, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 520, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 520, __pyx_L1_error)
/* "View.MemoryView":519
* @cname('getbuffer')
* def __getbuffer__(self, Py_buffer *info, int flags):
* if flags & PyBUF_WRITABLE and self.view.readonly: # <<<<<<<<<<<<<<
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*
*/
}
/* "View.MemoryView":522
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*
* if flags & PyBUF_ND: # <<<<<<<<<<<<<<
* info.shape = self.view.shape
* else:
*/
__pyx_t_1 = ((__pyx_v_flags & PyBUF_ND) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":523
*
* if flags & PyBUF_ND:
* info.shape = self.view.shape # <<<<<<<<<<<<<<
* else:
* info.shape = NULL
*/
__pyx_t_4 = __pyx_v_self->view.shape;
__pyx_v_info->shape = __pyx_t_4;
/* "View.MemoryView":522
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*
* if flags & PyBUF_ND: # <<<<<<<<<<<<<<
* info.shape = self.view.shape
* else:
*/
goto __pyx_L6;
}
/* "View.MemoryView":525
* info.shape = self.view.shape
* else:
* info.shape = NULL # <<<<<<<<<<<<<<
*
* if flags & PyBUF_STRIDES:
*/
/*else*/ {
__pyx_v_info->shape = NULL;
}
__pyx_L6:;
/* "View.MemoryView":527
* info.shape = NULL
*
* if flags & PyBUF_STRIDES: # <<<<<<<<<<<<<<
* info.strides = self.view.strides
* else:
*/
__pyx_t_1 = ((__pyx_v_flags & PyBUF_STRIDES) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":528
*
* if flags & PyBUF_STRIDES:
* info.strides = self.view.strides # <<<<<<<<<<<<<<
* else:
* info.strides = NULL
*/
__pyx_t_4 = __pyx_v_self->view.strides;
__pyx_v_info->strides = __pyx_t_4;
/* "View.MemoryView":527
* info.shape = NULL
*
* if flags & PyBUF_STRIDES: # <<<<<<<<<<<<<<
* info.strides = self.view.strides
* else:
*/
goto __pyx_L7;
}
/* "View.MemoryView":530
* info.strides = self.view.strides
* else:
* info.strides = NULL # <<<<<<<<<<<<<<
*
* if flags & PyBUF_INDIRECT:
*/
/*else*/ {
__pyx_v_info->strides = NULL;
}
__pyx_L7:;
/* "View.MemoryView":532
* info.strides = NULL
*
* if flags & PyBUF_INDIRECT: # <<<<<<<<<<<<<<
* info.suboffsets = self.view.suboffsets
* else:
*/
__pyx_t_1 = ((__pyx_v_flags & PyBUF_INDIRECT) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":533
*
* if flags & PyBUF_INDIRECT:
* info.suboffsets = self.view.suboffsets # <<<<<<<<<<<<<<
* else:
* info.suboffsets = NULL
*/
__pyx_t_4 = __pyx_v_self->view.suboffsets;
__pyx_v_info->suboffsets = __pyx_t_4;
/* "View.MemoryView":532
* info.strides = NULL
*
* if flags & PyBUF_INDIRECT: # <<<<<<<<<<<<<<
* info.suboffsets = self.view.suboffsets
* else:
*/
goto __pyx_L8;
}
/* "View.MemoryView":535
* info.suboffsets = self.view.suboffsets
* else:
* info.suboffsets = NULL # <<<<<<<<<<<<<<
*
* if flags & PyBUF_FORMAT:
*/
/*else*/ {
__pyx_v_info->suboffsets = NULL;
}
__pyx_L8:;
/* "View.MemoryView":537
* info.suboffsets = NULL
*
* if flags & PyBUF_FORMAT: # <<<<<<<<<<<<<<
* info.format = self.view.format
* else:
*/
__pyx_t_1 = ((__pyx_v_flags & PyBUF_FORMAT) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":538
*
* if flags & PyBUF_FORMAT:
* info.format = self.view.format # <<<<<<<<<<<<<<
* else:
* info.format = NULL
*/
__pyx_t_5 = __pyx_v_self->view.format;
__pyx_v_info->format = __pyx_t_5;
/* "View.MemoryView":537
* info.suboffsets = NULL
*
* if flags & PyBUF_FORMAT: # <<<<<<<<<<<<<<
* info.format = self.view.format
* else:
*/
goto __pyx_L9;
}
/* "View.MemoryView":540
* info.format = self.view.format
* else:
* info.format = NULL # <<<<<<<<<<<<<<
*
* info.buf = self.view.buf
*/
/*else*/ {
__pyx_v_info->format = NULL;
}
__pyx_L9:;
/* "View.MemoryView":542
* info.format = NULL
*
* info.buf = self.view.buf # <<<<<<<<<<<<<<
* info.ndim = self.view.ndim
* info.itemsize = self.view.itemsize
*/
__pyx_t_6 = __pyx_v_self->view.buf;
__pyx_v_info->buf = __pyx_t_6;
/* "View.MemoryView":543
*
* info.buf = self.view.buf
* info.ndim = self.view.ndim # <<<<<<<<<<<<<<
* info.itemsize = self.view.itemsize
* info.len = self.view.len
*/
__pyx_t_7 = __pyx_v_self->view.ndim;
__pyx_v_info->ndim = __pyx_t_7;
/* "View.MemoryView":544
* info.buf = self.view.buf
* info.ndim = self.view.ndim
* info.itemsize = self.view.itemsize # <<<<<<<<<<<<<<
* info.len = self.view.len
* info.readonly = self.view.readonly
*/
__pyx_t_8 = __pyx_v_self->view.itemsize;
__pyx_v_info->itemsize = __pyx_t_8;
/* "View.MemoryView":545
* info.ndim = self.view.ndim
* info.itemsize = self.view.itemsize
* info.len = self.view.len # <<<<<<<<<<<<<<
* info.readonly = self.view.readonly
* info.obj = self
*/
__pyx_t_8 = __pyx_v_self->view.len;
__pyx_v_info->len = __pyx_t_8;
/* "View.MemoryView":546
* info.itemsize = self.view.itemsize
* info.len = self.view.len
* info.readonly = self.view.readonly # <<<<<<<<<<<<<<
* info.obj = self
*
*/
__pyx_t_1 = __pyx_v_self->view.readonly;
__pyx_v_info->readonly = __pyx_t_1;
/* "View.MemoryView":547
* info.len = self.view.len
* info.readonly = self.view.readonly
* info.obj = self # <<<<<<<<<<<<<<
*
* __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")
*/
__Pyx_INCREF(((PyObject *)__pyx_v_self));
__Pyx_GIVEREF(((PyObject *)__pyx_v_self));
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj);
__pyx_v_info->obj = ((PyObject *)__pyx_v_self);
/* "View.MemoryView":518
*
* @cname('getbuffer')
* def __getbuffer__(self, Py_buffer *info, int flags): # <<<<<<<<<<<<<<
* if flags & PyBUF_WRITABLE and self.view.readonly:
* raise ValueError("Cannot create writable memory view from read-only memoryview")
*/
/* function exit code */
__pyx_r = 0;
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
if (__pyx_v_info->obj != NULL) {
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
}
goto __pyx_L2;
__pyx_L0:;
if (__pyx_v_info->obj == Py_None) {
__Pyx_GOTREF(__pyx_v_info->obj);
__Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;
}
__pyx_L2:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":553
*
* @property
* def T(self): # <<<<<<<<<<<<<<
* cdef _memoryviewslice result = memoryview_copy(self)
* transpose_memslice(&result.from_slice)
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_1T_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_1T_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_1T___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_1T___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
struct __pyx_memoryviewslice_obj *__pyx_v_result = 0;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":554
* @property
* def T(self):
* cdef _memoryviewslice result = memoryview_copy(self) # <<<<<<<<<<<<<<
* transpose_memslice(&result.from_slice)
* return result
*/
__pyx_t_1 = __pyx_memoryview_copy_object(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 554, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_memoryviewslice_type))))) __PYX_ERR(2, 554, __pyx_L1_error)
__pyx_v_result = ((struct __pyx_memoryviewslice_obj *)__pyx_t_1);
__pyx_t_1 = 0;
/* "View.MemoryView":555
* def T(self):
* cdef _memoryviewslice result = memoryview_copy(self)
* transpose_memslice(&result.from_slice) # <<<<<<<<<<<<<<
* return result
*
*/
__pyx_t_2 = __pyx_memslice_transpose((&__pyx_v_result->from_slice)); if (unlikely(__pyx_t_2 == ((int)0))) __PYX_ERR(2, 555, __pyx_L1_error)
/* "View.MemoryView":556
* cdef _memoryviewslice result = memoryview_copy(self)
* transpose_memslice(&result.from_slice)
* return result # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(((PyObject *)__pyx_v_result));
__pyx_r = ((PyObject *)__pyx_v_result);
goto __pyx_L0;
/* "View.MemoryView":553
*
* @property
* def T(self): # <<<<<<<<<<<<<<
* cdef _memoryviewslice result = memoryview_copy(self)
* transpose_memslice(&result.from_slice)
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview.T.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_result);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":559
*
* @property
* def base(self): # <<<<<<<<<<<<<<
* return self.obj
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4base_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4base_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_4base___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4base___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":560
* @property
* def base(self):
* return self.obj # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->obj);
__pyx_r = __pyx_v_self->obj;
goto __pyx_L0;
/* "View.MemoryView":559
*
* @property
* def base(self): # <<<<<<<<<<<<<<
* return self.obj
*
*/
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":563
*
* @property
* def shape(self): # <<<<<<<<<<<<<<
* return tuple([length for length in self.view.shape[:self.view.ndim]])
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_5shape_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_5shape_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_5shape___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_5shape___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
Py_ssize_t __pyx_v_length;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
Py_ssize_t *__pyx_t_2;
Py_ssize_t *__pyx_t_3;
Py_ssize_t *__pyx_t_4;
PyObject *__pyx_t_5 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":564
* @property
* def shape(self):
* return tuple([length for length in self.view.shape[:self.view.ndim]]) # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 564, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_3 = (__pyx_v_self->view.shape + __pyx_v_self->view.ndim);
for (__pyx_t_4 = __pyx_v_self->view.shape; __pyx_t_4 < __pyx_t_3; __pyx_t_4++) {
__pyx_t_2 = __pyx_t_4;
__pyx_v_length = (__pyx_t_2[0]);
__pyx_t_5 = PyInt_FromSsize_t(__pyx_v_length); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 564, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
if (unlikely(__Pyx_ListComp_Append(__pyx_t_1, (PyObject*)__pyx_t_5))) __PYX_ERR(2, 564, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
}
__pyx_t_5 = PyList_AsTuple(((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 564, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_r = __pyx_t_5;
__pyx_t_5 = 0;
goto __pyx_L0;
/* "View.MemoryView":563
*
* @property
* def shape(self): # <<<<<<<<<<<<<<
* return tuple([length for length in self.view.shape[:self.view.ndim]])
*
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("View.MemoryView.memoryview.shape.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":567
*
* @property
* def strides(self): # <<<<<<<<<<<<<<
* if self.view.strides == NULL:
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_7strides_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_7strides_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_7strides___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_7strides___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
Py_ssize_t __pyx_v_stride;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
Py_ssize_t *__pyx_t_3;
Py_ssize_t *__pyx_t_4;
Py_ssize_t *__pyx_t_5;
PyObject *__pyx_t_6 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":568
* @property
* def strides(self):
* if self.view.strides == NULL: # <<<<<<<<<<<<<<
*
* raise ValueError("Buffer view does not expose strides")
*/
__pyx_t_1 = ((__pyx_v_self->view.strides == NULL) != 0);
if (unlikely(__pyx_t_1)) {
/* "View.MemoryView":570
* if self.view.strides == NULL:
*
* raise ValueError("Buffer view does not expose strides") # <<<<<<<<<<<<<<
*
* return tuple([stride for stride in self.view.strides[:self.view.ndim]])
*/
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__19, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 570, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_Raise(__pyx_t_2, 0, 0, 0);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__PYX_ERR(2, 570, __pyx_L1_error)
/* "View.MemoryView":568
* @property
* def strides(self):
* if self.view.strides == NULL: # <<<<<<<<<<<<<<
*
* raise ValueError("Buffer view does not expose strides")
*/
}
/* "View.MemoryView":572
* raise ValueError("Buffer view does not expose strides")
*
* return tuple([stride for stride in self.view.strides[:self.view.ndim]]) # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 572, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_4 = (__pyx_v_self->view.strides + __pyx_v_self->view.ndim);
for (__pyx_t_5 = __pyx_v_self->view.strides; __pyx_t_5 < __pyx_t_4; __pyx_t_5++) {
__pyx_t_3 = __pyx_t_5;
__pyx_v_stride = (__pyx_t_3[0]);
__pyx_t_6 = PyInt_FromSsize_t(__pyx_v_stride); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 572, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
if (unlikely(__Pyx_ListComp_Append(__pyx_t_2, (PyObject*)__pyx_t_6))) __PYX_ERR(2, 572, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
}
__pyx_t_6 = PyList_AsTuple(((PyObject*)__pyx_t_2)); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 572, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_r = __pyx_t_6;
__pyx_t_6 = 0;
goto __pyx_L0;
/* "View.MemoryView":567
*
* @property
* def strides(self): # <<<<<<<<<<<<<<
* if self.view.strides == NULL:
*
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_AddTraceback("View.MemoryView.memoryview.strides.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":575
*
* @property
* def suboffsets(self): # <<<<<<<<<<<<<<
* if self.view.suboffsets == NULL:
* return (-1,) * self.view.ndim
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_10suboffsets_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_10suboffsets_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_10suboffsets___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_10suboffsets___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
Py_ssize_t __pyx_v_suboffset;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
Py_ssize_t *__pyx_t_4;
Py_ssize_t *__pyx_t_5;
Py_ssize_t *__pyx_t_6;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":576
* @property
* def suboffsets(self):
* if self.view.suboffsets == NULL: # <<<<<<<<<<<<<<
* return (-1,) * self.view.ndim
*
*/
__pyx_t_1 = ((__pyx_v_self->view.suboffsets == NULL) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":577
* def suboffsets(self):
* if self.view.suboffsets == NULL:
* return (-1,) * self.view.ndim # <<<<<<<<<<<<<<
*
* return tuple([suboffset for suboffset in self.view.suboffsets[:self.view.ndim]])
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->view.ndim); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 577, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyNumber_Multiply(__pyx_tuple__20, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 577, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_r = __pyx_t_3;
__pyx_t_3 = 0;
goto __pyx_L0;
/* "View.MemoryView":576
* @property
* def suboffsets(self):
* if self.view.suboffsets == NULL: # <<<<<<<<<<<<<<
* return (-1,) * self.view.ndim
*
*/
}
/* "View.MemoryView":579
* return (-1,) * self.view.ndim
*
* return tuple([suboffset for suboffset in self.view.suboffsets[:self.view.ndim]]) # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 579, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_5 = (__pyx_v_self->view.suboffsets + __pyx_v_self->view.ndim);
for (__pyx_t_6 = __pyx_v_self->view.suboffsets; __pyx_t_6 < __pyx_t_5; __pyx_t_6++) {
__pyx_t_4 = __pyx_t_6;
__pyx_v_suboffset = (__pyx_t_4[0]);
__pyx_t_2 = PyInt_FromSsize_t(__pyx_v_suboffset); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 579, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (unlikely(__Pyx_ListComp_Append(__pyx_t_3, (PyObject*)__pyx_t_2))) __PYX_ERR(2, 579, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
}
__pyx_t_2 = PyList_AsTuple(((PyObject*)__pyx_t_3)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 579, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":575
*
* @property
* def suboffsets(self): # <<<<<<<<<<<<<<
* if self.view.suboffsets == NULL:
* return (-1,) * self.view.ndim
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.suboffsets.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":582
*
* @property
* def ndim(self): # <<<<<<<<<<<<<<
* return self.view.ndim
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4ndim_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4ndim_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_4ndim___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4ndim___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":583
* @property
* def ndim(self):
* return self.view.ndim # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->view.ndim); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 583, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "View.MemoryView":582
*
* @property
* def ndim(self): # <<<<<<<<<<<<<<
* return self.view.ndim
*
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview.ndim.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":586
*
* @property
* def itemsize(self): # <<<<<<<<<<<<<<
* return self.view.itemsize
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_8itemsize_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_8itemsize_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_8itemsize___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_8itemsize___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":587
* @property
* def itemsize(self):
* return self.view.itemsize # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = PyInt_FromSsize_t(__pyx_v_self->view.itemsize); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 587, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "View.MemoryView":586
*
* @property
* def itemsize(self): # <<<<<<<<<<<<<<
* return self.view.itemsize
*
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview.itemsize.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":590
*
* @property
* def nbytes(self): # <<<<<<<<<<<<<<
* return self.size * self.view.itemsize
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_6nbytes_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_6nbytes_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_6nbytes___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_6nbytes___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":591
* @property
* def nbytes(self):
* return self.size * self.view.itemsize # <<<<<<<<<<<<<<
*
* @property
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 591, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = PyInt_FromSsize_t(__pyx_v_self->view.itemsize); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 591, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyNumber_Multiply(__pyx_t_1, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 591, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_r = __pyx_t_3;
__pyx_t_3 = 0;
goto __pyx_L0;
/* "View.MemoryView":590
*
* @property
* def nbytes(self): # <<<<<<<<<<<<<<
* return self.size * self.view.itemsize
*
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.nbytes.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":594
*
* @property
* def size(self): # <<<<<<<<<<<<<<
* if self._size is None:
* result = 1
*/
/* Getter for memoryview.size: lazily computes the total element count as the
 * product of all shape extents (self.view.shape[0..ndim-1]) and caches the
 * resulting Python int in self->_size; subsequent calls return the cache.
 * NOTE: generated by Cython — do not hand-edit; refcount ordering is exact. */
/* Python wrapper */
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4size_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_10memoryview_4size_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_10memoryview_4size___get__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_15View_dot_MemoryView_10memoryview_4size___get__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_v_result = NULL;
PyObject *__pyx_v_length = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
Py_ssize_t *__pyx_t_3;
Py_ssize_t *__pyx_t_4;
Py_ssize_t *__pyx_t_5;
PyObject *__pyx_t_6 = NULL;
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":595
 * @property
 * def size(self):
 * if self._size is None: # <<<<<<<<<<<<<<
 * result = 1
 *
 */
/* Only compute once: _size acts as a None-initialized cache slot. */
__pyx_t_1 = (__pyx_v_self->_size == Py_None);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* "View.MemoryView":596
 * def size(self):
 * if self._size is None:
 * result = 1 # <<<<<<<<<<<<<<
 *
 * for length in self.view.shape[:self.view.ndim]:
 */
__Pyx_INCREF(__pyx_int_1);
__pyx_v_result = __pyx_int_1;
/* "View.MemoryView":598
 * result = 1
 *
 * for length in self.view.shape[:self.view.ndim]: # <<<<<<<<<<<<<<
 * result *= length
 *
 */
/* Iterate the C shape array via raw pointers; each extent is boxed to a
 * Python int so the product can exceed the C integer range safely. */
__pyx_t_4 = (__pyx_v_self->view.shape + __pyx_v_self->view.ndim);
for (__pyx_t_5 = __pyx_v_self->view.shape; __pyx_t_5 < __pyx_t_4; __pyx_t_5++) {
__pyx_t_3 = __pyx_t_5;
__pyx_t_6 = PyInt_FromSsize_t((__pyx_t_3[0])); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 598, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_XDECREF_SET(__pyx_v_length, __pyx_t_6);
__pyx_t_6 = 0;
/* "View.MemoryView":599
 *
 * for length in self.view.shape[:self.view.ndim]:
 * result *= length # <<<<<<<<<<<<<<
 *
 * self._size = result
 */
__pyx_t_6 = PyNumber_InPlaceMultiply(__pyx_v_result, __pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 599, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF_SET(__pyx_v_result, __pyx_t_6);
__pyx_t_6 = 0;
}
/* "View.MemoryView":601
 * result *= length
 *
 * self._size = result # <<<<<<<<<<<<<<
 *
 * return self._size
 */
/* Store the product into the cache slot, releasing the previous value. */
__Pyx_INCREF(__pyx_v_result);
__Pyx_GIVEREF(__pyx_v_result);
__Pyx_GOTREF(__pyx_v_self->_size);
__Pyx_DECREF(__pyx_v_self->_size);
__pyx_v_self->_size = __pyx_v_result;
/* "View.MemoryView":595
 * @property
 * def size(self):
 * if self._size is None: # <<<<<<<<<<<<<<
 * result = 1
 *
 */
}
/* "View.MemoryView":603
 * self._size = result
 *
 * return self._size # <<<<<<<<<<<<<<
 *
 * def __len__(self):
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->_size);
__pyx_r = __pyx_v_self->_size;
goto __pyx_L0;
/* "View.MemoryView":594
 *
 * @property
 * def size(self): # <<<<<<<<<<<<<<
 * if self._size is None:
 * result = 1
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_6);
__Pyx_AddTraceback("View.MemoryView.memoryview.size.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_result);
__Pyx_XDECREF(__pyx_v_length);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":605
* return self._size
*
* def __len__(self): # <<<<<<<<<<<<<<
* if self.view.ndim >= 1:
* return self.view.shape[0]
*/
/* memoryview.__len__: returns the extent of the first dimension
 * (self.view.shape[0]) for ndim >= 1, or 0 for a 0-dimensional view.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static Py_ssize_t __pyx_memoryview___len__(PyObject *__pyx_v_self); /*proto*/
static Py_ssize_t __pyx_memoryview___len__(PyObject *__pyx_v_self) {
Py_ssize_t __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__len__ (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_10__len__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static Py_ssize_t __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_10__len__(struct __pyx_memoryview_obj *__pyx_v_self) {
Py_ssize_t __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
__Pyx_RefNannySetupContext("__len__", 0);
/* "View.MemoryView":606
 *
 * def __len__(self):
 * if self.view.ndim >= 1: # <<<<<<<<<<<<<<
 * return self.view.shape[0]
 *
 */
__pyx_t_1 = ((__pyx_v_self->view.ndim >= 1) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":607
 * def __len__(self):
 * if self.view.ndim >= 1:
 * return self.view.shape[0] # <<<<<<<<<<<<<<
 *
 * return 0
 */
__pyx_r = (__pyx_v_self->view.shape[0]);
goto __pyx_L0;
/* "View.MemoryView":606
 *
 * def __len__(self):
 * if self.view.ndim >= 1: # <<<<<<<<<<<<<<
 * return self.view.shape[0]
 *
 */
}
/* "View.MemoryView":609
 * return self.view.shape[0]
 *
 * return 0 # <<<<<<<<<<<<<<
 *
 * def __repr__(self):
 */
/* 0-dimensional views report length 0 rather than raising. */
__pyx_r = 0;
goto __pyx_L0;
/* "View.MemoryView":605
 * return self._size
 *
 * def __len__(self): # <<<<<<<<<<<<<<
 * if self.view.ndim >= 1:
 * return self.view.shape[0]
 */
/* function exit code */
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":611
* return 0
*
* def __repr__(self): # <<<<<<<<<<<<<<
* return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__,
* id(self))
*/
/* memoryview.__repr__: formats "<MemoryView of %r at 0x%x>" from the wrapped
 * object's class name (self.base.__class__.__name__) and id(self).
 * NOTE: generated by Cython — do not hand-edit; refcount ordering is exact. */
/* Python wrapper */
static PyObject *__pyx_memoryview___repr__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_memoryview___repr__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__repr__ (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_12__repr__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_12__repr__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("__repr__", 0);
/* "View.MemoryView":612
 *
 * def __repr__(self):
 * return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__, # <<<<<<<<<<<<<<
 * id(self))
 *
 */
/* Chain of attribute lookups: self.base -> .__class__ -> .__name__ */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 612, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 612, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_name_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 612, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/* "View.MemoryView":613
 * def __repr__(self):
 * return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__,
 * id(self)) # <<<<<<<<<<<<<<
 *
 * def __str__(self):
 */
__pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 613, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
/* "View.MemoryView":612
 *
 * def __repr__(self):
 * return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__, # <<<<<<<<<<<<<<
 * id(self))
 *
 */
/* Build the 2-tuple of format arguments, then apply the %-format string. */
__pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 612, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2);
__pyx_t_1 = 0;
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_MemoryView_of_r_at_0x_x, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 612, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":611
 * return 0
 *
 * def __repr__(self): # <<<<<<<<<<<<<<
 * return "<MemoryView of %r at 0x%x>" % (self.base.__class__.__name__,
 * id(self))
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":615
* id(self))
*
* def __str__(self): # <<<<<<<<<<<<<<
* return "<MemoryView of %r object>" % (self.base.__class__.__name__,)
*
*/
/* memoryview.__str__: formats "<MemoryView of %r object>" from the wrapped
 * object's class name (self.base.__class__.__name__).
 * NOTE: generated by Cython — do not hand-edit; refcount ordering is exact. */
/* Python wrapper */
static PyObject *__pyx_memoryview___str__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_memoryview___str__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__str__ (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_14__str__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_14__str__(struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("__str__", 0);
/* "View.MemoryView":616
 *
 * def __str__(self):
 * return "<MemoryView of %r object>" % (self.base.__class__.__name__,) # <<<<<<<<<<<<<<
 *
 *
 */
/* Chain of attribute lookups: self.base -> .__class__ -> .__name__ */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 616, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 616, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_name_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 616, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/* 1-tuple of format arguments for the %-format string. */
__pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 616, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1);
__pyx_t_1 = 0;
__pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_MemoryView_of_r_object, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 616, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "View.MemoryView":615
 * id(self))
 *
 * def __str__(self): # <<<<<<<<<<<<<<
 * return "<MemoryView of %r object>" % (self.base.__class__.__name__,)
 *
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":619
*
*
* def is_c_contig(self): # <<<<<<<<<<<<<<
* cdef __Pyx_memviewslice *mslice
* cdef __Pyx_memviewslice tmp
*/
/* memoryview.is_c_contig(): returns a Python bool indicating whether the
 * view's memory layout is C-contiguous (row-major), by extracting a slice
 * struct and delegating to slice_is_contig with order 'C'.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_memoryview_is_c_contig(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_memoryview_is_c_contig(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("is_c_contig (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_16is_c_contig(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_16is_c_contig(struct __pyx_memoryview_obj *__pyx_v_self) {
__Pyx_memviewslice *__pyx_v_mslice;
__Pyx_memviewslice __pyx_v_tmp;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice *__pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("is_c_contig", 0);
/* "View.MemoryView":622
 * cdef __Pyx_memviewslice *mslice
 * cdef __Pyx_memviewslice tmp
 * mslice = get_slice_from_memview(self, &tmp) # <<<<<<<<<<<<<<
 * return slice_is_contig(mslice[0], 'C', self.view.ndim)
 *
 */
/* tmp is stack storage the helper may fill; mslice points at the result. */
__pyx_t_1 = __pyx_memoryview_get_slice_from_memoryview(__pyx_v_self, (&__pyx_v_tmp)); if (unlikely(__pyx_t_1 == ((__Pyx_memviewslice *)NULL))) __PYX_ERR(2, 622, __pyx_L1_error)
__pyx_v_mslice = __pyx_t_1;
/* "View.MemoryView":623
 * cdef __Pyx_memviewslice tmp
 * mslice = get_slice_from_memview(self, &tmp)
 * return slice_is_contig(mslice[0], 'C', self.view.ndim) # <<<<<<<<<<<<<<
 *
 * def is_f_contig(self):
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_memviewslice_is_contig((__pyx_v_mslice[0]), 'C', __pyx_v_self->view.ndim)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 623, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":619
 *
 *
 * def is_c_contig(self): # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice *mslice
 * cdef __Pyx_memviewslice tmp
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.is_c_contig", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":625
* return slice_is_contig(mslice[0], 'C', self.view.ndim)
*
* def is_f_contig(self): # <<<<<<<<<<<<<<
* cdef __Pyx_memviewslice *mslice
* cdef __Pyx_memviewslice tmp
*/
/* memoryview.is_f_contig(): returns a Python bool indicating whether the
 * view's memory layout is Fortran-contiguous (column-major); mirrors
 * is_c_contig but passes order 'F' to slice_is_contig.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_memoryview_is_f_contig(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_memoryview_is_f_contig(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("is_f_contig (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_18is_f_contig(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_18is_f_contig(struct __pyx_memoryview_obj *__pyx_v_self) {
__Pyx_memviewslice *__pyx_v_mslice;
__Pyx_memviewslice __pyx_v_tmp;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice *__pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("is_f_contig", 0);
/* "View.MemoryView":628
 * cdef __Pyx_memviewslice *mslice
 * cdef __Pyx_memviewslice tmp
 * mslice = get_slice_from_memview(self, &tmp) # <<<<<<<<<<<<<<
 * return slice_is_contig(mslice[0], 'F', self.view.ndim)
 *
 */
/* tmp is stack storage the helper may fill; mslice points at the result. */
__pyx_t_1 = __pyx_memoryview_get_slice_from_memoryview(__pyx_v_self, (&__pyx_v_tmp)); if (unlikely(__pyx_t_1 == ((__Pyx_memviewslice *)NULL))) __PYX_ERR(2, 628, __pyx_L1_error)
__pyx_v_mslice = __pyx_t_1;
/* "View.MemoryView":629
 * cdef __Pyx_memviewslice tmp
 * mslice = get_slice_from_memview(self, &tmp)
 * return slice_is_contig(mslice[0], 'F', self.view.ndim) # <<<<<<<<<<<<<<
 *
 * def copy(self):
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_memviewslice_is_contig((__pyx_v_mslice[0]), 'F', __pyx_v_self->view.ndim)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 629, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":625
 * return slice_is_contig(mslice[0], 'C', self.view.ndim)
 *
 * def is_f_contig(self): # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice *mslice
 * cdef __Pyx_memviewslice tmp
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.is_f_contig", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":631
* return slice_is_contig(mslice[0], 'F', self.view.ndim)
*
* def copy(self): # <<<<<<<<<<<<<<
* cdef __Pyx_memviewslice mslice
* cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS
*/
/* memoryview.copy(): returns a new memoryview backed by a freshly allocated
 * C-contiguous copy of this view's data. Clears the F-contiguous buffer
 * flag, requests C-contiguity, and materializes via slice_copy_contig.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_memoryview_copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_memoryview_copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("copy (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_20copy(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_20copy(struct __pyx_memoryview_obj *__pyx_v_self) {
__Pyx_memviewslice __pyx_v_mslice;
int __pyx_v_flags;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("copy", 0);
/* "View.MemoryView":633
 * def copy(self):
 * cdef __Pyx_memviewslice mslice
 * cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS # <<<<<<<<<<<<<<
 *
 * slice_copy(self, &mslice)
 */
/* Drop the Fortran-contiguity request; C-contiguity is added below. */
__pyx_v_flags = (__pyx_v_self->flags & (~PyBUF_F_CONTIGUOUS));
/* "View.MemoryView":635
 * cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS
 *
 * slice_copy(self, &mslice) # <<<<<<<<<<<<<<
 * mslice = slice_copy_contig(&mslice, "c", self.view.ndim,
 * self.view.itemsize,
 */
__pyx_memoryview_slice_copy(__pyx_v_self, (&__pyx_v_mslice));
/* "View.MemoryView":636
 *
 * slice_copy(self, &mslice)
 * mslice = slice_copy_contig(&mslice, "c", self.view.ndim, # <<<<<<<<<<<<<<
 * self.view.itemsize,
 * flags|PyBUF_C_CONTIGUOUS,
 */
__pyx_t_1 = __pyx_memoryview_copy_new_contig((&__pyx_v_mslice), ((char *)"c"), __pyx_v_self->view.ndim, __pyx_v_self->view.itemsize, (__pyx_v_flags | PyBUF_C_CONTIGUOUS), __pyx_v_self->dtype_is_object); if (unlikely(PyErr_Occurred())) __PYX_ERR(2, 636, __pyx_L1_error)
__pyx_v_mslice = __pyx_t_1;
/* "View.MemoryView":641
 * self.dtype_is_object)
 *
 * return memoryview_copy_from_slice(self, &mslice) # <<<<<<<<<<<<<<
 *
 * def copy_fortran(self):
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __pyx_memoryview_copy_object_from_slice(__pyx_v_self, (&__pyx_v_mslice)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 641, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":631
 * return slice_is_contig(mslice[0], 'F', self.view.ndim)
 *
 * def copy(self): # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice mslice
 * cdef int flags = self.flags & ~PyBUF_F_CONTIGUOUS
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.copy", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":643
* return memoryview_copy_from_slice(self, &mslice)
*
* def copy_fortran(self): # <<<<<<<<<<<<<<
* cdef __Pyx_memviewslice src, dst
* cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS
*/
/* memoryview.copy_fortran(): returns a new memoryview backed by a freshly
 * allocated Fortran-contiguous (column-major) copy of this view's data.
 * Mirror image of copy(): clears the C-contiguous flag, requests F-contiguity.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_memoryview_copy_fortran(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_memoryview_copy_fortran(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("copy_fortran (wrapper)", 0);
__pyx_r = __pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_22copy_fortran(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_memoryview___pyx_pf_15View_dot_MemoryView_10memoryview_22copy_fortran(struct __pyx_memoryview_obj *__pyx_v_self) {
__Pyx_memviewslice __pyx_v_src;
__Pyx_memviewslice __pyx_v_dst;
int __pyx_v_flags;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_memviewslice __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("copy_fortran", 0);
/* "View.MemoryView":645
 * def copy_fortran(self):
 * cdef __Pyx_memviewslice src, dst
 * cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS # <<<<<<<<<<<<<<
 *
 * slice_copy(self, &src)
 */
/* Drop the C-contiguity request; F-contiguity is added below. */
__pyx_v_flags = (__pyx_v_self->flags & (~PyBUF_C_CONTIGUOUS));
/* "View.MemoryView":647
 * cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS
 *
 * slice_copy(self, &src) # <<<<<<<<<<<<<<
 * dst = slice_copy_contig(&src, "fortran", self.view.ndim,
 * self.view.itemsize,
 */
__pyx_memoryview_slice_copy(__pyx_v_self, (&__pyx_v_src));
/* "View.MemoryView":648
 *
 * slice_copy(self, &src)
 * dst = slice_copy_contig(&src, "fortran", self.view.ndim, # <<<<<<<<<<<<<<
 * self.view.itemsize,
 * flags|PyBUF_F_CONTIGUOUS,
 */
__pyx_t_1 = __pyx_memoryview_copy_new_contig((&__pyx_v_src), ((char *)"fortran"), __pyx_v_self->view.ndim, __pyx_v_self->view.itemsize, (__pyx_v_flags | PyBUF_F_CONTIGUOUS), __pyx_v_self->dtype_is_object); if (unlikely(PyErr_Occurred())) __PYX_ERR(2, 648, __pyx_L1_error)
__pyx_v_dst = __pyx_t_1;
/* "View.MemoryView":653
 * self.dtype_is_object)
 *
 * return memoryview_copy_from_slice(self, &dst) # <<<<<<<<<<<<<<
 *
 *
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __pyx_memoryview_copy_object_from_slice(__pyx_v_self, (&__pyx_v_dst)); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 653, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":643
 * return memoryview_copy_from_slice(self, &mslice)
 *
 * def copy_fortran(self): # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice src, dst
 * cdef int flags = self.flags & ~PyBUF_C_CONTIGUOUS
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView.memoryview.copy_fortran", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* memoryview.__reduce_cython__: pickling is unsupported for this extension
 * type (non-trivial __cinit__), so this always raises TypeError.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_pw___pyx_memoryview_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw___pyx_memoryview_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_memoryview___reduce_cython__(((struct __pyx_memoryview_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf___pyx_memoryview___reduce_cython__(CYTHON_UNUSED struct __pyx_memoryview_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
/* __pyx_tuple__21 holds the pre-built TypeError message arguments. */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 2, __pyx_L1_error)
/* "(tree fragment)":1
 * def __reduce_cython__(self): # <<<<<<<<<<<<<<
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* memoryview.__setstate_cython__: unpickling is unsupported for this
 * extension type (non-trivial __cinit__), so this always raises TypeError.
 * NOTE: generated by Cython — do not hand-edit. */
/* Python wrapper */
static PyObject *__pyx_pw___pyx_memoryview_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw___pyx_memoryview_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_memoryview_2__setstate_cython__(((struct __pyx_memoryview_obj *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf___pyx_memoryview_2__setstate_cython__(CYTHON_UNUSED struct __pyx_memoryview_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
/* __pyx_tuple__22 holds the pre-built TypeError message arguments. */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__22, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 4, __pyx_L1_error)
/* "(tree fragment)":3
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":657
*
* @cname('__pyx_memoryview_new')
* cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo): # <<<<<<<<<<<<<<
* cdef memoryview result = memoryview(o, flags, dtype_is_object)
* result.typeinfo = typeinfo
*/
/* C-level constructor (cname __pyx_memoryview_new) for the memoryview
 * extension type: calls memoryview(o, flags, dtype_is_object) through the
 * normal type call, then attaches the C typeinfo pointer on the result.
 * Returns a new reference, or 0 with an exception set on failure.
 * NOTE: generated by Cython — do not hand-edit; refcount ordering is exact. */
static PyObject *__pyx_memoryview_new(PyObject *__pyx_v_o, int __pyx_v_flags, int __pyx_v_dtype_is_object, __Pyx_TypeInfo *__pyx_v_typeinfo) {
struct __pyx_memoryview_obj *__pyx_v_result = 0;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("memoryview_cwrapper", 0);
/* "View.MemoryView":658
 * @cname('__pyx_memoryview_new')
 * cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo):
 * cdef memoryview result = memoryview(o, flags, dtype_is_object) # <<<<<<<<<<<<<<
 * result.typeinfo = typeinfo
 * return result
 */
/* Box the C arguments and build the (o, flags, dtype_is_object) arg tuple. */
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_flags); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 658, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_dtype_is_object); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 658, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 658, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_INCREF(__pyx_v_o);
__Pyx_GIVEREF(__pyx_v_o);
PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_o);
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
__pyx_t_1 = 0;
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_memoryview_type), __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 658, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_v_result = ((struct __pyx_memoryview_obj *)__pyx_t_2);
__pyx_t_2 = 0;
/* "View.MemoryView":659
 * cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo):
 * cdef memoryview result = memoryview(o, flags, dtype_is_object)
 * result.typeinfo = typeinfo # <<<<<<<<<<<<<<
 * return result
 *
 */
__pyx_v_result->typeinfo = __pyx_v_typeinfo;
/* "View.MemoryView":660
 * cdef memoryview result = memoryview(o, flags, dtype_is_object)
 * result.typeinfo = typeinfo
 * return result # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_check')
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(((PyObject *)__pyx_v_result));
__pyx_r = ((PyObject *)__pyx_v_result);
goto __pyx_L0;
/* "View.MemoryView":657
 *
 * @cname('__pyx_memoryview_new')
 * cdef memoryview_cwrapper(object o, int flags, bint dtype_is_object, __Pyx_TypeInfo *typeinfo): # <<<<<<<<<<<<<<
 * cdef memoryview result = memoryview(o, flags, dtype_is_object)
 * result.typeinfo = typeinfo
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview_cwrapper", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_result);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":663
*
* @cname('__pyx_memoryview_check')
* cdef inline bint memoryview_check(object o): # <<<<<<<<<<<<<<
* return isinstance(o, memoryview)
*
*/
/* memoryview_check (cname __pyx_memoryview_check): C-level
 * isinstance(o, memoryview) test against the Cython memoryview type;
 * returns nonzero if o is an instance (or subclass instance).
 * NOTE: generated by Cython — do not hand-edit. */
static CYTHON_INLINE int __pyx_memoryview_check(PyObject *__pyx_v_o) {
int __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
__Pyx_RefNannySetupContext("memoryview_check", 0);
/* "View.MemoryView":664
 * @cname('__pyx_memoryview_check')
 * cdef inline bint memoryview_check(object o):
 * return isinstance(o, memoryview) # <<<<<<<<<<<<<<
 *
 * cdef tuple _unellipsify(object index, int ndim):
 */
__pyx_t_1 = __Pyx_TypeCheck(__pyx_v_o, __pyx_memoryview_type);
__pyx_r = __pyx_t_1;
goto __pyx_L0;
/* "View.MemoryView":663
 *
 * @cname('__pyx_memoryview_check')
 * cdef inline bint memoryview_check(object o): # <<<<<<<<<<<<<<
 * return isinstance(o, memoryview)
 *
 */
/* function exit code */
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":666
* return isinstance(o, memoryview)
*
* cdef tuple _unellipsify(object index, int ndim): # <<<<<<<<<<<<<<
* """
* Replace all ellipses with full slices and fill incomplete indices with
*/
static PyObject *_unellipsify(PyObject *__pyx_v_index, int __pyx_v_ndim) {
PyObject *__pyx_v_tup = NULL;
PyObject *__pyx_v_result = NULL;
int __pyx_v_have_slices;
int __pyx_v_seen_ellipsis;
CYTHON_UNUSED PyObject *__pyx_v_idx = NULL;
PyObject *__pyx_v_item = NULL;
Py_ssize_t __pyx_v_nslices;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
Py_ssize_t __pyx_t_5;
PyObject *(*__pyx_t_6)(PyObject *);
PyObject *__pyx_t_7 = NULL;
Py_ssize_t __pyx_t_8;
int __pyx_t_9;
int __pyx_t_10;
PyObject *__pyx_t_11 = NULL;
__Pyx_RefNannySetupContext("_unellipsify", 0);
/* "View.MemoryView":671
* full slices.
* """
* if not isinstance(index, tuple): # <<<<<<<<<<<<<<
* tup = (index,)
* else:
*/
__pyx_t_1 = PyTuple_Check(__pyx_v_index);
__pyx_t_2 = ((!(__pyx_t_1 != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":672
* """
* if not isinstance(index, tuple):
* tup = (index,) # <<<<<<<<<<<<<<
* else:
* tup = index
*/
__pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 672, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_INCREF(__pyx_v_index);
__Pyx_GIVEREF(__pyx_v_index);
PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_index);
__pyx_v_tup = __pyx_t_3;
__pyx_t_3 = 0;
/* "View.MemoryView":671
* full slices.
* """
* if not isinstance(index, tuple): # <<<<<<<<<<<<<<
* tup = (index,)
* else:
*/
goto __pyx_L3;
}
/* "View.MemoryView":674
* tup = (index,)
* else:
* tup = index # <<<<<<<<<<<<<<
*
* result = []
*/
/*else*/ {
__Pyx_INCREF(__pyx_v_index);
__pyx_v_tup = __pyx_v_index;
}
__pyx_L3:;
/* "View.MemoryView":676
* tup = index
*
* result = [] # <<<<<<<<<<<<<<
* have_slices = False
* seen_ellipsis = False
*/
__pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 676, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_v_result = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
/* "View.MemoryView":677
*
* result = []
* have_slices = False # <<<<<<<<<<<<<<
* seen_ellipsis = False
* for idx, item in enumerate(tup):
*/
__pyx_v_have_slices = 0;
/* "View.MemoryView":678
* result = []
* have_slices = False
* seen_ellipsis = False # <<<<<<<<<<<<<<
* for idx, item in enumerate(tup):
* if item is Ellipsis:
*/
__pyx_v_seen_ellipsis = 0;
/* "View.MemoryView":679
* have_slices = False
* seen_ellipsis = False
* for idx, item in enumerate(tup): # <<<<<<<<<<<<<<
* if item is Ellipsis:
* if not seen_ellipsis:
*/
__Pyx_INCREF(__pyx_int_0);
__pyx_t_3 = __pyx_int_0;
if (likely(PyList_CheckExact(__pyx_v_tup)) || PyTuple_CheckExact(__pyx_v_tup)) {
__pyx_t_4 = __pyx_v_tup; __Pyx_INCREF(__pyx_t_4); __pyx_t_5 = 0;
__pyx_t_6 = NULL;
} else {
__pyx_t_5 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_v_tup); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 679, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_6 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 679, __pyx_L1_error)
}
for (;;) {
if (likely(!__pyx_t_6)) {
if (likely(PyList_CheckExact(__pyx_t_4))) {
if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_4)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_7 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_7); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(2, 679, __pyx_L1_error)
#else
__pyx_t_7 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 679, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
#endif
} else {
if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_4)) break;
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
__pyx_t_7 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_5); __Pyx_INCREF(__pyx_t_7); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(2, 679, __pyx_L1_error)
#else
__pyx_t_7 = PySequence_ITEM(__pyx_t_4, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 679, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
#endif
}
} else {
__pyx_t_7 = __pyx_t_6(__pyx_t_4);
if (unlikely(!__pyx_t_7)) {
PyObject* exc_type = PyErr_Occurred();
if (exc_type) {
if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
else __PYX_ERR(2, 679, __pyx_L1_error)
}
break;
}
__Pyx_GOTREF(__pyx_t_7);
}
__Pyx_XDECREF_SET(__pyx_v_item, __pyx_t_7);
__pyx_t_7 = 0;
__Pyx_INCREF(__pyx_t_3);
__Pyx_XDECREF_SET(__pyx_v_idx, __pyx_t_3);
__pyx_t_7 = __Pyx_PyInt_AddObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 679, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__Pyx_DECREF(__pyx_t_3);
__pyx_t_3 = __pyx_t_7;
__pyx_t_7 = 0;
/* "View.MemoryView":680
* seen_ellipsis = False
* for idx, item in enumerate(tup):
* if item is Ellipsis: # <<<<<<<<<<<<<<
* if not seen_ellipsis:
* result.extend([slice(None)] * (ndim - len(tup) + 1))
*/
__pyx_t_2 = (__pyx_v_item == __pyx_builtin_Ellipsis);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* "View.MemoryView":681
* for idx, item in enumerate(tup):
* if item is Ellipsis:
* if not seen_ellipsis: # <<<<<<<<<<<<<<
* result.extend([slice(None)] * (ndim - len(tup) + 1))
* seen_ellipsis = True
*/
__pyx_t_1 = ((!(__pyx_v_seen_ellipsis != 0)) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":682
* if item is Ellipsis:
* if not seen_ellipsis:
* result.extend([slice(None)] * (ndim - len(tup) + 1)) # <<<<<<<<<<<<<<
* seen_ellipsis = True
* else:
*/
__pyx_t_8 = PyObject_Length(__pyx_v_tup); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-1))) __PYX_ERR(2, 682, __pyx_L1_error)
__pyx_t_7 = PyList_New(1 * ((((__pyx_v_ndim - __pyx_t_8) + 1)<0) ? 0:((__pyx_v_ndim - __pyx_t_8) + 1))); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 682, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
{ Py_ssize_t __pyx_temp;
for (__pyx_temp=0; __pyx_temp < ((__pyx_v_ndim - __pyx_t_8) + 1); __pyx_temp++) {
__Pyx_INCREF(__pyx_slice__23);
__Pyx_GIVEREF(__pyx_slice__23);
PyList_SET_ITEM(__pyx_t_7, __pyx_temp, __pyx_slice__23);
}
}
__pyx_t_9 = __Pyx_PyList_Extend(__pyx_v_result, __pyx_t_7); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(2, 682, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
/* "View.MemoryView":683
* if not seen_ellipsis:
* result.extend([slice(None)] * (ndim - len(tup) + 1))
* seen_ellipsis = True # <<<<<<<<<<<<<<
* else:
* result.append(slice(None))
*/
__pyx_v_seen_ellipsis = 1;
/* "View.MemoryView":681
* for idx, item in enumerate(tup):
* if item is Ellipsis:
* if not seen_ellipsis: # <<<<<<<<<<<<<<
* result.extend([slice(None)] * (ndim - len(tup) + 1))
* seen_ellipsis = True
*/
goto __pyx_L7;
}
/* "View.MemoryView":685
* seen_ellipsis = True
* else:
* result.append(slice(None)) # <<<<<<<<<<<<<<
* have_slices = True
* else:
*/
/*else*/ {
__pyx_t_9 = __Pyx_PyList_Append(__pyx_v_result, __pyx_slice__23); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(2, 685, __pyx_L1_error)
}
__pyx_L7:;
/* "View.MemoryView":686
* else:
* result.append(slice(None))
* have_slices = True # <<<<<<<<<<<<<<
* else:
* if not isinstance(item, slice) and not PyIndex_Check(item):
*/
__pyx_v_have_slices = 1;
/* "View.MemoryView":680
* seen_ellipsis = False
* for idx, item in enumerate(tup):
* if item is Ellipsis: # <<<<<<<<<<<<<<
* if not seen_ellipsis:
* result.extend([slice(None)] * (ndim - len(tup) + 1))
*/
goto __pyx_L6;
}
/* "View.MemoryView":688
* have_slices = True
* else:
* if not isinstance(item, slice) and not PyIndex_Check(item): # <<<<<<<<<<<<<<
* raise TypeError("Cannot index with type '%s'" % type(item))
*
*/
/*else*/ {
__pyx_t_2 = PySlice_Check(__pyx_v_item);
__pyx_t_10 = ((!(__pyx_t_2 != 0)) != 0);
if (__pyx_t_10) {
} else {
__pyx_t_1 = __pyx_t_10;
goto __pyx_L9_bool_binop_done;
}
__pyx_t_10 = ((!(PyIndex_Check(__pyx_v_item) != 0)) != 0);
__pyx_t_1 = __pyx_t_10;
__pyx_L9_bool_binop_done:;
if (unlikely(__pyx_t_1)) {
/* "View.MemoryView":689
* else:
* if not isinstance(item, slice) and not PyIndex_Check(item):
* raise TypeError("Cannot index with type '%s'" % type(item)) # <<<<<<<<<<<<<<
*
* have_slices = have_slices or isinstance(item, slice)
*/
__pyx_t_7 = __Pyx_PyString_FormatSafe(__pyx_kp_s_Cannot_index_with_type_s, ((PyObject *)Py_TYPE(__pyx_v_item))); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 689, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_11 = __Pyx_PyObject_CallOneArg(__pyx_builtin_TypeError, __pyx_t_7); if (unlikely(!__pyx_t_11)) __PYX_ERR(2, 689, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_11);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_Raise(__pyx_t_11, 0, 0, 0);
__Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;
__PYX_ERR(2, 689, __pyx_L1_error)
/* "View.MemoryView":688
* have_slices = True
* else:
* if not isinstance(item, slice) and not PyIndex_Check(item): # <<<<<<<<<<<<<<
* raise TypeError("Cannot index with type '%s'" % type(item))
*
*/
}
/* "View.MemoryView":691
* raise TypeError("Cannot index with type '%s'" % type(item))
*
* have_slices = have_slices or isinstance(item, slice) # <<<<<<<<<<<<<<
* result.append(item)
*
*/
__pyx_t_10 = (__pyx_v_have_slices != 0);
if (!__pyx_t_10) {
} else {
__pyx_t_1 = __pyx_t_10;
goto __pyx_L11_bool_binop_done;
}
__pyx_t_10 = PySlice_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_10 != 0);
__pyx_t_1 = __pyx_t_2;
__pyx_L11_bool_binop_done:;
__pyx_v_have_slices = __pyx_t_1;
/* "View.MemoryView":692
*
* have_slices = have_slices or isinstance(item, slice)
* result.append(item) # <<<<<<<<<<<<<<
*
* nslices = ndim - len(result)
*/
__pyx_t_9 = __Pyx_PyList_Append(__pyx_v_result, __pyx_v_item); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(2, 692, __pyx_L1_error)
}
__pyx_L6:;
/* "View.MemoryView":679
* have_slices = False
* seen_ellipsis = False
* for idx, item in enumerate(tup): # <<<<<<<<<<<<<<
* if item is Ellipsis:
* if not seen_ellipsis:
*/
}
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
/* "View.MemoryView":694
* result.append(item)
*
* nslices = ndim - len(result) # <<<<<<<<<<<<<<
* if nslices:
* result.extend([slice(None)] * nslices)
*/
__pyx_t_5 = PyList_GET_SIZE(__pyx_v_result); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(2, 694, __pyx_L1_error)
__pyx_v_nslices = (__pyx_v_ndim - __pyx_t_5);
/* "View.MemoryView":695
*
* nslices = ndim - len(result)
* if nslices: # <<<<<<<<<<<<<<
* result.extend([slice(None)] * nslices)
*
*/
__pyx_t_1 = (__pyx_v_nslices != 0);
if (__pyx_t_1) {
/* "View.MemoryView":696
* nslices = ndim - len(result)
* if nslices:
* result.extend([slice(None)] * nslices) # <<<<<<<<<<<<<<
*
* return have_slices or nslices, tuple(result)
*/
__pyx_t_3 = PyList_New(1 * ((__pyx_v_nslices<0) ? 0:__pyx_v_nslices)); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 696, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
{ Py_ssize_t __pyx_temp;
for (__pyx_temp=0; __pyx_temp < __pyx_v_nslices; __pyx_temp++) {
__Pyx_INCREF(__pyx_slice__23);
__Pyx_GIVEREF(__pyx_slice__23);
PyList_SET_ITEM(__pyx_t_3, __pyx_temp, __pyx_slice__23);
}
}
__pyx_t_9 = __Pyx_PyList_Extend(__pyx_v_result, __pyx_t_3); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(2, 696, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
/* "View.MemoryView":695
*
* nslices = ndim - len(result)
* if nslices: # <<<<<<<<<<<<<<
* result.extend([slice(None)] * nslices)
*
*/
}
/* "View.MemoryView":698
* result.extend([slice(None)] * nslices)
*
* return have_slices or nslices, tuple(result) # <<<<<<<<<<<<<<
*
* cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim):
*/
__Pyx_XDECREF(__pyx_r);
if (!__pyx_v_have_slices) {
} else {
__pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_have_slices); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 698, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = __pyx_t_4;
__pyx_t_4 = 0;
goto __pyx_L14_bool_binop_done;
}
__pyx_t_4 = PyInt_FromSsize_t(__pyx_v_nslices); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 698, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_3 = __pyx_t_4;
__pyx_t_4 = 0;
__pyx_L14_bool_binop_done:;
__pyx_t_4 = PyList_AsTuple(__pyx_v_result); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 698, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_11 = PyTuple_New(2); if (unlikely(!__pyx_t_11)) __PYX_ERR(2, 698, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_11);
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_3);
__Pyx_GIVEREF(__pyx_t_4);
PyTuple_SET_ITEM(__pyx_t_11, 1, __pyx_t_4);
__pyx_t_3 = 0;
__pyx_t_4 = 0;
__pyx_r = ((PyObject*)__pyx_t_11);
__pyx_t_11 = 0;
goto __pyx_L0;
/* "View.MemoryView":666
* return isinstance(o, memoryview)
*
* cdef tuple _unellipsify(object index, int ndim): # <<<<<<<<<<<<<<
* """
* Replace all ellipses with full slices and fill incomplete indices with
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_11);
__Pyx_AddTraceback("View.MemoryView._unellipsify", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_tup);
__Pyx_XDECREF(__pyx_v_result);
__Pyx_XDECREF(__pyx_v_idx);
__Pyx_XDECREF(__pyx_v_item);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":700
* return have_slices or nslices, tuple(result)
*
* cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim): # <<<<<<<<<<<<<<
* for suboffset in suboffsets[:ndim]:
* if suboffset >= 0:
*/
/* NOTE(review): Cython-generated helper (do not hand-edit upstream; regenerate
 * from the .pyx instead).  Validates that the first `ndim` entries of
 * `suboffsets` are all negative, i.e. that every dimension of the buffer is
 * "direct" (no pointer indirection as in PIL-style buffers).  Raises
 * ValueError and returns NULL (via the __pyx_L1_error path) on the first
 * non-negative suboffset; returns a new reference to Py_None on success. */
static PyObject *assert_direct_dimensions(Py_ssize_t *__pyx_v_suboffsets, int __pyx_v_ndim) {
  Py_ssize_t __pyx_v_suboffset;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  Py_ssize_t *__pyx_t_1;
  Py_ssize_t *__pyx_t_2;
  Py_ssize_t *__pyx_t_3;
  int __pyx_t_4;
  PyObject *__pyx_t_5 = NULL;
  __Pyx_RefNannySetupContext("assert_direct_dimensions", 0);
  /* "View.MemoryView":701
 *
 * cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim):
 *     for suboffset in suboffsets[:ndim]:             # <<<<<<<<<<<<<<
 *         if suboffset >= 0:
 *             raise ValueError("Indirect dimensions not supported")
 */
  /* NOTE(review): pointer-range loop over suboffsets[0..ndim-1]; assumes the
   * caller passes ndim >= 0 matching the array length — not checked here. */
  __pyx_t_2 = (__pyx_v_suboffsets + __pyx_v_ndim);
  for (__pyx_t_3 = __pyx_v_suboffsets; __pyx_t_3 < __pyx_t_2; __pyx_t_3++) {
    __pyx_t_1 = __pyx_t_3;
    __pyx_v_suboffset = (__pyx_t_1[0]);
    /* "View.MemoryView":702
 * cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim):
 *     for suboffset in suboffsets[:ndim]:
 *         if suboffset >= 0:             # <<<<<<<<<<<<<<
 *             raise ValueError("Indirect dimensions not supported")
 *
 */
    __pyx_t_4 = ((__pyx_v_suboffset >= 0) != 0);
    if (unlikely(__pyx_t_4)) {
      /* "View.MemoryView":703
 *     for suboffset in suboffsets[:ndim]:
 *         if suboffset >= 0:
 *             raise ValueError("Indirect dimensions not supported")             # <<<<<<<<<<<<<<
 *
 *
 */
      /* NOTE(review): __pyx_tuple__24 is the pre-built one-element args tuple
       * holding the error message string (built at module init). */
      __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 703, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_5);
      __Pyx_Raise(__pyx_t_5, 0, 0, 0);
      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
      __PYX_ERR(2, 703, __pyx_L1_error)
      /* "View.MemoryView":702
 * cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim):
 *     for suboffset in suboffsets[:ndim]:
 *         if suboffset >= 0:             # <<<<<<<<<<<<<<
 *             raise ValueError("Indirect dimensions not supported")
 *
 */
    }
  }
  /* "View.MemoryView":700
 * return have_slices or nslices, tuple(result)
 *
 * cdef assert_direct_dimensions(Py_ssize_t *suboffsets, int ndim):             # <<<<<<<<<<<<<<
 *     for suboffset in suboffsets[:ndim]:
 *         if suboffset >= 0:
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("View.MemoryView.assert_direct_dimensions", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":710
*
* @cname('__pyx_memview_slice')
* cdef memoryview memview_slice(memoryview memview, object indices): # <<<<<<<<<<<<<<
* cdef int new_ndim = 0, suboffset_dim = -1, dim
* cdef bint negative_step
*/
/* NOTE(review): Cython-generated core of memoryview slicing (do not hand-edit
 * upstream; regenerate from the .pyx).  Given a memoryview and an
 * already-"unellipsified" tuple/sequence of per-dimension indices (ints,
 * Nones, or slice objects), builds a new __Pyx_memviewslice `dst` by
 * dispatching each index to slice_memviewslice, then wraps `dst` in a fresh
 * memoryview object via memoryview_fromslice.  Returns a new reference, or
 * NULL (with an exception set) on error.  The exact ordering of the refcount
 * macros and goto labels is part of the generated error-handling protocol. */
static struct __pyx_memoryview_obj *__pyx_memview_slice(struct __pyx_memoryview_obj *__pyx_v_memview, PyObject *__pyx_v_indices) {
  int __pyx_v_new_ndim;
  int __pyx_v_suboffset_dim;
  int __pyx_v_dim;
  __Pyx_memviewslice __pyx_v_src;
  __Pyx_memviewslice __pyx_v_dst;
  __Pyx_memviewslice *__pyx_v_p_src;
  struct __pyx_memoryviewslice_obj *__pyx_v_memviewsliceobj = 0;
  __Pyx_memviewslice *__pyx_v_p_dst;
  int *__pyx_v_p_suboffset_dim;
  Py_ssize_t __pyx_v_start;
  Py_ssize_t __pyx_v_stop;
  Py_ssize_t __pyx_v_step;
  int __pyx_v_have_start;
  int __pyx_v_have_stop;
  int __pyx_v_have_step;
  PyObject *__pyx_v_index = NULL;
  struct __pyx_memoryview_obj *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  int __pyx_t_2;
  PyObject *__pyx_t_3 = NULL;
  struct __pyx_memoryview_obj *__pyx_t_4;
  char *__pyx_t_5;
  int __pyx_t_6;
  Py_ssize_t __pyx_t_7;
  PyObject *(*__pyx_t_8)(PyObject *);
  PyObject *__pyx_t_9 = NULL;
  Py_ssize_t __pyx_t_10;
  int __pyx_t_11;
  Py_ssize_t __pyx_t_12;
  __Pyx_RefNannySetupContext("memview_slice", 0);
  /* "View.MemoryView":711
 * @cname('__pyx_memview_slice')
 * cdef memoryview memview_slice(memoryview memview, object indices):
 *     cdef int new_ndim = 0, suboffset_dim = -1, dim             # <<<<<<<<<<<<<<
 *     cdef bint negative_step
 *     cdef __Pyx_memviewslice src, dst
 */
  __pyx_v_new_ndim = 0;
  /* NOTE(review): -1 sentinel = "no indirect dimension seen yet"; updated via
   * p_suboffset_dim by slice_memviewslice. */
  __pyx_v_suboffset_dim = -1;
  /* "View.MemoryView":718
 *
 *
 *     memset(&dst, 0, sizeof(dst))             # <<<<<<<<<<<<<<
 *
 *     cdef _memoryviewslice memviewsliceobj
 */
  (void)(memset((&__pyx_v_dst), 0, (sizeof(__pyx_v_dst))));
  /* "View.MemoryView":722
 *     cdef _memoryviewslice memviewsliceobj
 *
 *     assert memview.view.ndim > 0             # <<<<<<<<<<<<<<
 *
 *     if isinstance(memview, _memoryviewslice):
 */
  #ifndef CYTHON_WITHOUT_ASSERTIONS
  if (unlikely(!Py_OptimizeFlag)) {
    if (unlikely(!((__pyx_v_memview->view.ndim > 0) != 0))) {
      PyErr_SetNone(PyExc_AssertionError);
      __PYX_ERR(2, 722, __pyx_L1_error)
    }
  }
  #endif
  /* "View.MemoryView":724
 *     assert memview.view.ndim > 0
 *
 *     if isinstance(memview, _memoryviewslice):             # <<<<<<<<<<<<<<
 *         memviewsliceobj = memview
 *         p_src = &memviewsliceobj.from_slice
 */
  /* NOTE(review): a _memoryviewslice already owns a __Pyx_memviewslice; reuse
   * it directly instead of re-deriving one from the Py_buffer view. */
  __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type);
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":725
 *
 *     if isinstance(memview, _memoryviewslice):
 *         memviewsliceobj = memview             # <<<<<<<<<<<<<<
 *         p_src = &memviewsliceobj.from_slice
 *     else:
 */
    if (!(likely(((((PyObject *)__pyx_v_memview)) == Py_None) || likely(__Pyx_TypeTest(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type))))) __PYX_ERR(2, 725, __pyx_L1_error)
    __pyx_t_3 = ((PyObject *)__pyx_v_memview);
    __Pyx_INCREF(__pyx_t_3);
    __pyx_v_memviewsliceobj = ((struct __pyx_memoryviewslice_obj *)__pyx_t_3);
    __pyx_t_3 = 0;
    /* "View.MemoryView":726
 *     if isinstance(memview, _memoryviewslice):
 *         memviewsliceobj = memview
 *         p_src = &memviewsliceobj.from_slice             # <<<<<<<<<<<<<<
 *     else:
 *         slice_copy(memview, &src)
 */
    __pyx_v_p_src = (&__pyx_v_memviewsliceobj->from_slice);
    /* "View.MemoryView":724
 *     assert memview.view.ndim > 0
 *
 *     if isinstance(memview, _memoryviewslice):             # <<<<<<<<<<<<<<
 *         memviewsliceobj = memview
 *         p_src = &memviewsliceobj.from_slice
 */
    goto __pyx_L3;
  }
  /* "View.MemoryView":728
 *         p_src = &memviewsliceobj.from_slice
 *     else:
 *         slice_copy(memview, &src)             # <<<<<<<<<<<<<<
 *         p_src = &src
 *
 */
  /*else*/ {
    /* NOTE(review): plain memoryview — copy shape/strides/suboffsets from its
     * Py_buffer into the stack-local `src`. */
    __pyx_memoryview_slice_copy(__pyx_v_memview, (&__pyx_v_src));
    /* "View.MemoryView":729
 *     else:
 *         slice_copy(memview, &src)
 *         p_src = &src             # <<<<<<<<<<<<<<
 *
 *
 */
    __pyx_v_p_src = (&__pyx_v_src);
  }
  __pyx_L3:;
  /* "View.MemoryView":735
 *
 *
 *     dst.memview = p_src.memview             # <<<<<<<<<<<<<<
 *     dst.data = p_src.data
 *
 */
  /* NOTE(review): borrowed copy — memoryview_fromslice takes care of the
   * ownership of dst.memview at the end. */
  __pyx_t_4 = __pyx_v_p_src->memview;
  __pyx_v_dst.memview = __pyx_t_4;
  /* "View.MemoryView":736
 *
 *     dst.memview = p_src.memview
 *     dst.data = p_src.data             # <<<<<<<<<<<<<<
 *
 *
 */
  __pyx_t_5 = __pyx_v_p_src->data;
  __pyx_v_dst.data = __pyx_t_5;
  /* "View.MemoryView":741
 *
 *
 *     cdef __Pyx_memviewslice *p_dst = &dst             # <<<<<<<<<<<<<<
 *     cdef int *p_suboffset_dim = &suboffset_dim
 *     cdef Py_ssize_t start, stop, step
 */
  __pyx_v_p_dst = (&__pyx_v_dst);
  /* "View.MemoryView":742
 *
 *     cdef __Pyx_memviewslice *p_dst = &dst
 *     cdef int *p_suboffset_dim = &suboffset_dim             # <<<<<<<<<<<<<<
 *     cdef Py_ssize_t start, stop, step
 *     cdef bint have_start, have_stop, have_step
 */
  __pyx_v_p_suboffset_dim = (&__pyx_v_suboffset_dim);
  /* "View.MemoryView":746
 *     cdef bint have_start, have_stop, have_step
 *
 *     for dim, index in enumerate(indices):             # <<<<<<<<<<<<<<
 *         if PyIndex_Check(index):
 *             slice_memviewslice(
 */
  /* NOTE(review): generated enumerate() — __pyx_t_6 is the running `dim`
   * counter; fast-path for exact list/tuple, generic iterator otherwise. */
  __pyx_t_6 = 0;
  if (likely(PyList_CheckExact(__pyx_v_indices)) || PyTuple_CheckExact(__pyx_v_indices)) {
    __pyx_t_3 = __pyx_v_indices; __Pyx_INCREF(__pyx_t_3); __pyx_t_7 = 0;
    __pyx_t_8 = NULL;
  } else {
    __pyx_t_7 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_indices); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 746, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __pyx_t_8 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 746, __pyx_L1_error)
  }
  for (;;) {
    if (likely(!__pyx_t_8)) {
      if (likely(PyList_CheckExact(__pyx_t_3))) {
        if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_3)) break;
        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
        __pyx_t_9 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(2, 746, __pyx_L1_error)
        #else
        __pyx_t_9 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 746, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_9);
        #endif
      } else {
        if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_3)) break;
        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
        __pyx_t_9 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_9); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(2, 746, __pyx_L1_error)
        #else
        __pyx_t_9 = PySequence_ITEM(__pyx_t_3, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 746, __pyx_L1_error)
        __Pyx_GOTREF(__pyx_t_9);
        #endif
      }
    } else {
      __pyx_t_9 = __pyx_t_8(__pyx_t_3);
      if (unlikely(!__pyx_t_9)) {
        PyObject* exc_type = PyErr_Occurred();
        if (exc_type) {
          if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();
          else __PYX_ERR(2, 746, __pyx_L1_error)
        }
        break;
      }
      __Pyx_GOTREF(__pyx_t_9);
    }
    __Pyx_XDECREF_SET(__pyx_v_index, __pyx_t_9);
    __pyx_t_9 = 0;
    __pyx_v_dim = __pyx_t_6;
    __pyx_t_6 = (__pyx_t_6 + 1);
    /* "View.MemoryView":747
 *
 *     for dim, index in enumerate(indices):
 *         if PyIndex_Check(index):             # <<<<<<<<<<<<<<
 *             slice_memviewslice(
 *                 p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
 */
    /* NOTE(review): integer index — dimension is consumed (is_slice=0, no
     * start/stop/step), so new_ndim is NOT incremented on this branch. */
    __pyx_t_2 = (PyIndex_Check(__pyx_v_index) != 0);
    if (__pyx_t_2) {
      /* "View.MemoryView":751
 *                 p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
 *                 dim, new_ndim, p_suboffset_dim,
 *                 index, 0, 0, # start, stop, step             # <<<<<<<<<<<<<<
 *                 0, 0, 0, # have_{start,stop,step}
 *                 False)
 */
      __pyx_t_10 = __Pyx_PyIndex_AsSsize_t(__pyx_v_index); if (unlikely((__pyx_t_10 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 751, __pyx_L1_error)
      /* "View.MemoryView":748
 *     for dim, index in enumerate(indices):
 *         if PyIndex_Check(index):
 *             slice_memviewslice(             # <<<<<<<<<<<<<<
 *                 p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
 *                 dim, new_ndim, p_suboffset_dim,
 */
      __pyx_t_11 = __pyx_memoryview_slice_memviewslice(__pyx_v_p_dst, (__pyx_v_p_src->shape[__pyx_v_dim]), (__pyx_v_p_src->strides[__pyx_v_dim]), (__pyx_v_p_src->suboffsets[__pyx_v_dim]), __pyx_v_dim, __pyx_v_new_ndim, __pyx_v_p_suboffset_dim, __pyx_t_10, 0, 0, 0, 0, 0, 0); if (unlikely(__pyx_t_11 == ((int)-1))) __PYX_ERR(2, 748, __pyx_L1_error)
      /* "View.MemoryView":747
 *
 *     for dim, index in enumerate(indices):
 *         if PyIndex_Check(index):             # <<<<<<<<<<<<<<
 *             slice_memviewslice(
 *                 p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
 */
      goto __pyx_L6;
    }
    /* "View.MemoryView":754
 *                 0, 0, 0, # have_{start,stop,step}
 *                 False)
 *         elif index is None:             # <<<<<<<<<<<<<<
 *             p_dst.shape[new_ndim] = 1
 *             p_dst.strides[new_ndim] = 0
 */
    /* NOTE(review): None (np.newaxis semantics) — insert a new length-1 axis
     * directly into dst; stride 0, suboffset -1 (direct). */
    __pyx_t_2 = (__pyx_v_index == Py_None);
    __pyx_t_1 = (__pyx_t_2 != 0);
    if (__pyx_t_1) {
      /* "View.MemoryView":755
 *                 False)
 *         elif index is None:
 *             p_dst.shape[new_ndim] = 1             # <<<<<<<<<<<<<<
 *             p_dst.strides[new_ndim] = 0
 *             p_dst.suboffsets[new_ndim] = -1
 */
      (__pyx_v_p_dst->shape[__pyx_v_new_ndim]) = 1;
      /* "View.MemoryView":756
 *         elif index is None:
 *             p_dst.shape[new_ndim] = 1
 *             p_dst.strides[new_ndim] = 0             # <<<<<<<<<<<<<<
 *             p_dst.suboffsets[new_ndim] = -1
 *             new_ndim += 1
 */
      (__pyx_v_p_dst->strides[__pyx_v_new_ndim]) = 0;
      /* "View.MemoryView":757
 *             p_dst.shape[new_ndim] = 1
 *             p_dst.strides[new_ndim] = 0
 *             p_dst.suboffsets[new_ndim] = -1             # <<<<<<<<<<<<<<
 *             new_ndim += 1
 *         else:
 */
      (__pyx_v_p_dst->suboffsets[__pyx_v_new_ndim]) = -1L;
      /* "View.MemoryView":758
 *             p_dst.strides[new_ndim] = 0
 *             p_dst.suboffsets[new_ndim] = -1
 *             new_ndim += 1             # <<<<<<<<<<<<<<
 *         else:
 *             start = index.start or 0
 */
      __pyx_v_new_ndim = (__pyx_v_new_ndim + 1);
      /* "View.MemoryView":754
 *                 0, 0, 0, # have_{start,stop,step}
 *                 False)
 *         elif index is None:             # <<<<<<<<<<<<<<
 *             p_dst.shape[new_ndim] = 1
 *             p_dst.strides[new_ndim] = 0
 */
      goto __pyx_L6;
    }
    /* "View.MemoryView":760
 *             new_ndim += 1
 *         else:
 *             start = index.start or 0             # <<<<<<<<<<<<<<
 *             stop = index.stop or 0
 *             step = index.step or 0
 */
    /* NOTE(review): slice object — the `or 0` defaults are placeholders only;
     * the separate have_start/have_stop/have_step flags below tell
     * slice_memviewslice which bounds were actually given. */
    /*else*/ {
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_start); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 760, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 760, __pyx_L1_error)
      if (!__pyx_t_1) {
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      } else {
        __pyx_t_12 = __Pyx_PyIndex_AsSsize_t(__pyx_t_9); if (unlikely((__pyx_t_12 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 760, __pyx_L1_error)
        __pyx_t_10 = __pyx_t_12;
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
        goto __pyx_L7_bool_binop_done;
      }
      __pyx_t_10 = 0;
      __pyx_L7_bool_binop_done:;
      __pyx_v_start = __pyx_t_10;
      /* "View.MemoryView":761
 *         else:
 *             start = index.start or 0
 *             stop = index.stop or 0             # <<<<<<<<<<<<<<
 *             step = index.step or 0
 *
 */
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_stop); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 761, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 761, __pyx_L1_error)
      if (!__pyx_t_1) {
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      } else {
        __pyx_t_12 = __Pyx_PyIndex_AsSsize_t(__pyx_t_9); if (unlikely((__pyx_t_12 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 761, __pyx_L1_error)
        __pyx_t_10 = __pyx_t_12;
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
        goto __pyx_L9_bool_binop_done;
      }
      __pyx_t_10 = 0;
      __pyx_L9_bool_binop_done:;
      __pyx_v_stop = __pyx_t_10;
      /* "View.MemoryView":762
 *             start = index.start or 0
 *             stop = index.stop or 0
 *             step = index.step or 0             # <<<<<<<<<<<<<<
 *
 *             have_start = index.start is not None
 */
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_step); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 762, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(2, 762, __pyx_L1_error)
      if (!__pyx_t_1) {
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      } else {
        __pyx_t_12 = __Pyx_PyIndex_AsSsize_t(__pyx_t_9); if (unlikely((__pyx_t_12 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 762, __pyx_L1_error)
        __pyx_t_10 = __pyx_t_12;
        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
        goto __pyx_L11_bool_binop_done;
      }
      __pyx_t_10 = 0;
      __pyx_L11_bool_binop_done:;
      __pyx_v_step = __pyx_t_10;
      /* "View.MemoryView":764
 *             step = index.step or 0
 *
 *             have_start = index.start is not None             # <<<<<<<<<<<<<<
 *             have_stop = index.stop is not None
 *             have_step = index.step is not None
 */
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_start); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 764, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = (__pyx_t_9 != Py_None);
      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      __pyx_v_have_start = __pyx_t_1;
      /* "View.MemoryView":765
 *
 *             have_start = index.start is not None
 *             have_stop = index.stop is not None             # <<<<<<<<<<<<<<
 *             have_step = index.step is not None
 *
 */
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_stop); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 765, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = (__pyx_t_9 != Py_None);
      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      __pyx_v_have_stop = __pyx_t_1;
      /* "View.MemoryView":766
 *             have_start = index.start is not None
 *             have_stop = index.stop is not None
 *             have_step = index.step is not None             # <<<<<<<<<<<<<<
 *
 *             slice_memviewslice(
 */
      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_index, __pyx_n_s_step); if (unlikely(!__pyx_t_9)) __PYX_ERR(2, 766, __pyx_L1_error)
      __Pyx_GOTREF(__pyx_t_9);
      __pyx_t_1 = (__pyx_t_9 != Py_None);
      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
      __pyx_v_have_step = __pyx_t_1;
      /* "View.MemoryView":768
 *             have_step = index.step is not None
 *
 *             slice_memviewslice(             # <<<<<<<<<<<<<<
 *                 p_dst, p_src.shape[dim], p_src.strides[dim], p_src.suboffsets[dim],
 *                 dim, new_ndim, p_suboffset_dim,
 */
      __pyx_t_11 = __pyx_memoryview_slice_memviewslice(__pyx_v_p_dst, (__pyx_v_p_src->shape[__pyx_v_dim]), (__pyx_v_p_src->strides[__pyx_v_dim]), (__pyx_v_p_src->suboffsets[__pyx_v_dim]), __pyx_v_dim, __pyx_v_new_ndim, __pyx_v_p_suboffset_dim, __pyx_v_start, __pyx_v_stop, __pyx_v_step, __pyx_v_have_start, __pyx_v_have_stop, __pyx_v_have_step, 1); if (unlikely(__pyx_t_11 == ((int)-1))) __PYX_ERR(2, 768, __pyx_L1_error)
      /* "View.MemoryView":774
 *                 have_start, have_stop, have_step,
 *                 True)
 *             new_ndim += 1             # <<<<<<<<<<<<<<
 *
 *     if isinstance(memview, _memoryviewslice):
 */
      __pyx_v_new_ndim = (__pyx_v_new_ndim + 1);
    }
    __pyx_L6:;
    /* "View.MemoryView":746
 *     cdef bint have_start, have_stop, have_step
 *
 *     for dim, index in enumerate(indices):             # <<<<<<<<<<<<<<
 *         if PyIndex_Check(index):
 *             slice_memviewslice(
 */
  }
  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
  /* "View.MemoryView":776
 *             new_ndim += 1
 *
 *     if isinstance(memview, _memoryviewslice):             # <<<<<<<<<<<<<<
 *         return memoryview_fromslice(dst, new_ndim,
 *                                     memviewsliceobj.to_object_func,
 */
  /* NOTE(review): preserve the object/dtype conversion callbacks when the
   * source was itself a _memoryviewslice; otherwise pass NULLs. */
  __pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type);
  __pyx_t_2 = (__pyx_t_1 != 0);
  if (__pyx_t_2) {
    /* "View.MemoryView":777
 *
 *     if isinstance(memview, _memoryviewslice):
 *         return memoryview_fromslice(dst, new_ndim,             # <<<<<<<<<<<<<<
 *                                     memviewsliceobj.to_object_func,
 *                                     memviewsliceobj.to_dtype_func,
 */
    __Pyx_XDECREF(((PyObject *)__pyx_r));
    /* "View.MemoryView":778
 *     if isinstance(memview, _memoryviewslice):
 *         return memoryview_fromslice(dst, new_ndim,
 *                                     memviewsliceobj.to_object_func,             # <<<<<<<<<<<<<<
 *                                     memviewsliceobj.to_dtype_func,
 *                                     memview.dtype_is_object)
 */
    if (unlikely(!__pyx_v_memviewsliceobj)) { __Pyx_RaiseUnboundLocalError("memviewsliceobj"); __PYX_ERR(2, 778, __pyx_L1_error) }
    /* "View.MemoryView":779
 *         return memoryview_fromslice(dst, new_ndim,
 *                                     memviewsliceobj.to_object_func,
 *                                     memviewsliceobj.to_dtype_func,             # <<<<<<<<<<<<<<
 *                                     memview.dtype_is_object)
 *     else:
 */
    if (unlikely(!__pyx_v_memviewsliceobj)) { __Pyx_RaiseUnboundLocalError("memviewsliceobj"); __PYX_ERR(2, 779, __pyx_L1_error) }
    /* "View.MemoryView":777
 *
 *     if isinstance(memview, _memoryviewslice):
 *         return memoryview_fromslice(dst, new_ndim,             # <<<<<<<<<<<<<<
 *                                     memviewsliceobj.to_object_func,
 *                                     memviewsliceobj.to_dtype_func,
 */
    __pyx_t_3 = __pyx_memoryview_fromslice(__pyx_v_dst, __pyx_v_new_ndim, __pyx_v_memviewsliceobj->to_object_func, __pyx_v_memviewsliceobj->to_dtype_func, __pyx_v_memview->dtype_is_object); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 777, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_memoryview_type))))) __PYX_ERR(2, 777, __pyx_L1_error)
    __pyx_r = ((struct __pyx_memoryview_obj *)__pyx_t_3);
    __pyx_t_3 = 0;
    goto __pyx_L0;
    /* "View.MemoryView":776
 *             new_ndim += 1
 *
 *     if isinstance(memview, _memoryviewslice):             # <<<<<<<<<<<<<<
 *         return memoryview_fromslice(dst, new_ndim,
 *                                     memviewsliceobj.to_object_func,
 */
  }
  /* "View.MemoryView":782
 *                                     memview.dtype_is_object)
 *     else:
 *         return memoryview_fromslice(dst, new_ndim, NULL, NULL,             # <<<<<<<<<<<<<<
 *                                     memview.dtype_is_object)
 *
 */
  /*else*/ {
    __Pyx_XDECREF(((PyObject *)__pyx_r));
    /* "View.MemoryView":783
 *     else:
 *         return memoryview_fromslice(dst, new_ndim, NULL, NULL,
 *                                     memview.dtype_is_object)             # <<<<<<<<<<<<<<
 *
 *
 */
    __pyx_t_3 = __pyx_memoryview_fromslice(__pyx_v_dst, __pyx_v_new_ndim, NULL, NULL, __pyx_v_memview->dtype_is_object); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 782, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    /* "View.MemoryView":782
 *                                     memview.dtype_is_object)
 *     else:
 *         return memoryview_fromslice(dst, new_ndim, NULL, NULL,             # <<<<<<<<<<<<<<
 *                                     memview.dtype_is_object)
 *
 */
    if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_memoryview_type))))) __PYX_ERR(2, 782, __pyx_L1_error)
    __pyx_r = ((struct __pyx_memoryview_obj *)__pyx_t_3);
    __pyx_t_3 = 0;
    goto __pyx_L0;
  }
  /* "View.MemoryView":710
 *
 * @cname('__pyx_memview_slice')
 * cdef memoryview memview_slice(memoryview memview, object indices):             # <<<<<<<<<<<<<<
 *     cdef int new_ndim = 0, suboffset_dim = -1, dim
 *     cdef bint negative_step
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_9);
  __Pyx_AddTraceback("View.MemoryView.memview_slice", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XDECREF((PyObject *)__pyx_v_memviewsliceobj);
  __Pyx_XDECREF(__pyx_v_index);
  __Pyx_XGIVEREF((PyObject *)__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "View.MemoryView":807
*
* @cname('__pyx_memoryview_slice_memviewslice')
* cdef int slice_memviewslice( # <<<<<<<<<<<<<<
* __Pyx_memviewslice *dst,
* Py_ssize_t shape, Py_ssize_t stride, Py_ssize_t suboffset,
*/
static int __pyx_memoryview_slice_memviewslice(__Pyx_memviewslice *__pyx_v_dst, Py_ssize_t __pyx_v_shape, Py_ssize_t __pyx_v_stride, Py_ssize_t __pyx_v_suboffset, int __pyx_v_dim, int __pyx_v_new_ndim, int *__pyx_v_suboffset_dim, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_stop, Py_ssize_t __pyx_v_step, int __pyx_v_have_start, int __pyx_v_have_stop, int __pyx_v_have_step, int __pyx_v_is_slice) {
Py_ssize_t __pyx_v_new_shape;
int __pyx_v_negative_step;
int __pyx_r;
int __pyx_t_1;
int __pyx_t_2;
int __pyx_t_3;
/* "View.MemoryView":827
* cdef bint negative_step
*
* if not is_slice: # <<<<<<<<<<<<<<
*
* if start < 0:
*/
__pyx_t_1 = ((!(__pyx_v_is_slice != 0)) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":829
* if not is_slice:
*
* if start < 0: # <<<<<<<<<<<<<<
* start += shape
* if not 0 <= start < shape:
*/
__pyx_t_1 = ((__pyx_v_start < 0) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":830
*
* if start < 0:
* start += shape # <<<<<<<<<<<<<<
* if not 0 <= start < shape:
* _err_dim(IndexError, "Index out of bounds (axis %d)", dim)
*/
__pyx_v_start = (__pyx_v_start + __pyx_v_shape);
/* "View.MemoryView":829
* if not is_slice:
*
* if start < 0: # <<<<<<<<<<<<<<
* start += shape
* if not 0 <= start < shape:
*/
}
/* "View.MemoryView":831
* if start < 0:
* start += shape
* if not 0 <= start < shape: # <<<<<<<<<<<<<<
* _err_dim(IndexError, "Index out of bounds (axis %d)", dim)
* else:
*/
__pyx_t_1 = (0 <= __pyx_v_start);
if (__pyx_t_1) {
__pyx_t_1 = (__pyx_v_start < __pyx_v_shape);
}
__pyx_t_2 = ((!(__pyx_t_1 != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":832
* start += shape
* if not 0 <= start < shape:
* _err_dim(IndexError, "Index out of bounds (axis %d)", dim) # <<<<<<<<<<<<<<
* else:
*
*/
__pyx_t_3 = __pyx_memoryview_err_dim(__pyx_builtin_IndexError, ((char *)"Index out of bounds (axis %d)"), __pyx_v_dim); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(2, 832, __pyx_L1_error)
/* "View.MemoryView":831
* if start < 0:
* start += shape
* if not 0 <= start < shape: # <<<<<<<<<<<<<<
* _err_dim(IndexError, "Index out of bounds (axis %d)", dim)
* else:
*/
}
/* "View.MemoryView":827
* cdef bint negative_step
*
* if not is_slice: # <<<<<<<<<<<<<<
*
* if start < 0:
*/
goto __pyx_L3;
}
/* "View.MemoryView":835
* else:
*
* negative_step = have_step != 0 and step < 0 # <<<<<<<<<<<<<<
*
* if have_step and step == 0:
*/
/*else*/ {
__pyx_t_1 = ((__pyx_v_have_step != 0) != 0);
if (__pyx_t_1) {
} else {
__pyx_t_2 = __pyx_t_1;
goto __pyx_L6_bool_binop_done;
}
__pyx_t_1 = ((__pyx_v_step < 0) != 0);
__pyx_t_2 = __pyx_t_1;
__pyx_L6_bool_binop_done:;
__pyx_v_negative_step = __pyx_t_2;
/* "View.MemoryView":837
* negative_step = have_step != 0 and step < 0
*
* if have_step and step == 0: # <<<<<<<<<<<<<<
* _err_dim(ValueError, "Step may not be zero (axis %d)", dim)
*
*/
__pyx_t_1 = (__pyx_v_have_step != 0);
if (__pyx_t_1) {
} else {
__pyx_t_2 = __pyx_t_1;
goto __pyx_L9_bool_binop_done;
}
__pyx_t_1 = ((__pyx_v_step == 0) != 0);
__pyx_t_2 = __pyx_t_1;
__pyx_L9_bool_binop_done:;
if (__pyx_t_2) {
/* "View.MemoryView":838
*
* if have_step and step == 0:
* _err_dim(ValueError, "Step may not be zero (axis %d)", dim) # <<<<<<<<<<<<<<
*
*
*/
__pyx_t_3 = __pyx_memoryview_err_dim(__pyx_builtin_ValueError, ((char *)"Step may not be zero (axis %d)"), __pyx_v_dim); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(2, 838, __pyx_L1_error)
/* "View.MemoryView":837
* negative_step = have_step != 0 and step < 0
*
* if have_step and step == 0: # <<<<<<<<<<<<<<
* _err_dim(ValueError, "Step may not be zero (axis %d)", dim)
*
*/
}
/* "View.MemoryView":841
*
*
* if have_start: # <<<<<<<<<<<<<<
* if start < 0:
* start += shape
*/
__pyx_t_2 = (__pyx_v_have_start != 0);
if (__pyx_t_2) {
/* "View.MemoryView":842
*
* if have_start:
* if start < 0: # <<<<<<<<<<<<<<
* start += shape
* if start < 0:
*/
__pyx_t_2 = ((__pyx_v_start < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":843
* if have_start:
* if start < 0:
* start += shape # <<<<<<<<<<<<<<
* if start < 0:
* start = 0
*/
__pyx_v_start = (__pyx_v_start + __pyx_v_shape);
/* "View.MemoryView":844
* if start < 0:
* start += shape
* if start < 0: # <<<<<<<<<<<<<<
* start = 0
* elif start >= shape:
*/
__pyx_t_2 = ((__pyx_v_start < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":845
* start += shape
* if start < 0:
* start = 0 # <<<<<<<<<<<<<<
* elif start >= shape:
* if negative_step:
*/
__pyx_v_start = 0;
/* "View.MemoryView":844
* if start < 0:
* start += shape
* if start < 0: # <<<<<<<<<<<<<<
* start = 0
* elif start >= shape:
*/
}
/* "View.MemoryView":842
*
* if have_start:
* if start < 0: # <<<<<<<<<<<<<<
* start += shape
* if start < 0:
*/
goto __pyx_L12;
}
/* "View.MemoryView":846
* if start < 0:
* start = 0
* elif start >= shape: # <<<<<<<<<<<<<<
* if negative_step:
* start = shape - 1
*/
__pyx_t_2 = ((__pyx_v_start >= __pyx_v_shape) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":847
* start = 0
* elif start >= shape:
* if negative_step: # <<<<<<<<<<<<<<
* start = shape - 1
* else:
*/
__pyx_t_2 = (__pyx_v_negative_step != 0);
if (__pyx_t_2) {
/* "View.MemoryView":848
* elif start >= shape:
* if negative_step:
* start = shape - 1 # <<<<<<<<<<<<<<
* else:
* start = shape
*/
__pyx_v_start = (__pyx_v_shape - 1);
/* "View.MemoryView":847
* start = 0
* elif start >= shape:
* if negative_step: # <<<<<<<<<<<<<<
* start = shape - 1
* else:
*/
goto __pyx_L14;
}
/* "View.MemoryView":850
* start = shape - 1
* else:
* start = shape # <<<<<<<<<<<<<<
* else:
* if negative_step:
*/
/*else*/ {
__pyx_v_start = __pyx_v_shape;
}
__pyx_L14:;
/* "View.MemoryView":846
* if start < 0:
* start = 0
* elif start >= shape: # <<<<<<<<<<<<<<
* if negative_step:
* start = shape - 1
*/
}
__pyx_L12:;
/* "View.MemoryView":841
*
*
* if have_start: # <<<<<<<<<<<<<<
* if start < 0:
* start += shape
*/
goto __pyx_L11;
}
/* "View.MemoryView":852
* start = shape
* else:
* if negative_step: # <<<<<<<<<<<<<<
* start = shape - 1
* else:
*/
/*else*/ {
__pyx_t_2 = (__pyx_v_negative_step != 0);
if (__pyx_t_2) {
/* "View.MemoryView":853
* else:
* if negative_step:
* start = shape - 1 # <<<<<<<<<<<<<<
* else:
* start = 0
*/
__pyx_v_start = (__pyx_v_shape - 1);
/* "View.MemoryView":852
* start = shape
* else:
* if negative_step: # <<<<<<<<<<<<<<
* start = shape - 1
* else:
*/
goto __pyx_L15;
}
/* "View.MemoryView":855
* start = shape - 1
* else:
* start = 0 # <<<<<<<<<<<<<<
*
* if have_stop:
*/
/*else*/ {
__pyx_v_start = 0;
}
__pyx_L15:;
}
__pyx_L11:;
/* "View.MemoryView":857
* start = 0
*
* if have_stop: # <<<<<<<<<<<<<<
* if stop < 0:
* stop += shape
*/
__pyx_t_2 = (__pyx_v_have_stop != 0);
if (__pyx_t_2) {
/* "View.MemoryView":858
*
* if have_stop:
* if stop < 0: # <<<<<<<<<<<<<<
* stop += shape
* if stop < 0:
*/
__pyx_t_2 = ((__pyx_v_stop < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":859
* if have_stop:
* if stop < 0:
* stop += shape # <<<<<<<<<<<<<<
* if stop < 0:
* stop = 0
*/
__pyx_v_stop = (__pyx_v_stop + __pyx_v_shape);
/* "View.MemoryView":860
* if stop < 0:
* stop += shape
* if stop < 0: # <<<<<<<<<<<<<<
* stop = 0
* elif stop > shape:
*/
__pyx_t_2 = ((__pyx_v_stop < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":861
* stop += shape
* if stop < 0:
* stop = 0 # <<<<<<<<<<<<<<
* elif stop > shape:
* stop = shape
*/
__pyx_v_stop = 0;
/* "View.MemoryView":860
* if stop < 0:
* stop += shape
* if stop < 0: # <<<<<<<<<<<<<<
* stop = 0
* elif stop > shape:
*/
}
/* "View.MemoryView":858
*
* if have_stop:
* if stop < 0: # <<<<<<<<<<<<<<
* stop += shape
* if stop < 0:
*/
goto __pyx_L17;
}
/* "View.MemoryView":862
* if stop < 0:
* stop = 0
* elif stop > shape: # <<<<<<<<<<<<<<
* stop = shape
* else:
*/
__pyx_t_2 = ((__pyx_v_stop > __pyx_v_shape) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":863
* stop = 0
* elif stop > shape:
* stop = shape # <<<<<<<<<<<<<<
* else:
* if negative_step:
*/
__pyx_v_stop = __pyx_v_shape;
/* "View.MemoryView":862
* if stop < 0:
* stop = 0
* elif stop > shape: # <<<<<<<<<<<<<<
* stop = shape
* else:
*/
}
__pyx_L17:;
/* "View.MemoryView":857
* start = 0
*
* if have_stop: # <<<<<<<<<<<<<<
* if stop < 0:
* stop += shape
*/
goto __pyx_L16;
}
/* "View.MemoryView":865
* stop = shape
* else:
* if negative_step: # <<<<<<<<<<<<<<
* stop = -1
* else:
*/
/*else*/ {
__pyx_t_2 = (__pyx_v_negative_step != 0);
if (__pyx_t_2) {
/* "View.MemoryView":866
* else:
* if negative_step:
* stop = -1 # <<<<<<<<<<<<<<
* else:
* stop = shape
*/
__pyx_v_stop = -1L;
/* "View.MemoryView":865
* stop = shape
* else:
* if negative_step: # <<<<<<<<<<<<<<
* stop = -1
* else:
*/
goto __pyx_L19;
}
/* "View.MemoryView":868
* stop = -1
* else:
* stop = shape # <<<<<<<<<<<<<<
*
* if not have_step:
*/
/*else*/ {
__pyx_v_stop = __pyx_v_shape;
}
__pyx_L19:;
}
__pyx_L16:;
/* "View.MemoryView":870
* stop = shape
*
* if not have_step: # <<<<<<<<<<<<<<
* step = 1
*
*/
__pyx_t_2 = ((!(__pyx_v_have_step != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":871
*
* if not have_step:
* step = 1 # <<<<<<<<<<<<<<
*
*
*/
__pyx_v_step = 1;
/* "View.MemoryView":870
* stop = shape
*
* if not have_step: # <<<<<<<<<<<<<<
* step = 1
*
*/
}
/* "View.MemoryView":875
*
* with cython.cdivision(True):
* new_shape = (stop - start) // step # <<<<<<<<<<<<<<
*
* if (stop - start) - step * new_shape:
*/
__pyx_v_new_shape = ((__pyx_v_stop - __pyx_v_start) / __pyx_v_step);
/* "View.MemoryView":877
* new_shape = (stop - start) // step
*
* if (stop - start) - step * new_shape: # <<<<<<<<<<<<<<
* new_shape += 1
*
*/
__pyx_t_2 = (((__pyx_v_stop - __pyx_v_start) - (__pyx_v_step * __pyx_v_new_shape)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":878
*
* if (stop - start) - step * new_shape:
* new_shape += 1 # <<<<<<<<<<<<<<
*
* if new_shape < 0:
*/
__pyx_v_new_shape = (__pyx_v_new_shape + 1);
/* "View.MemoryView":877
* new_shape = (stop - start) // step
*
* if (stop - start) - step * new_shape: # <<<<<<<<<<<<<<
* new_shape += 1
*
*/
}
/* "View.MemoryView":880
* new_shape += 1
*
* if new_shape < 0: # <<<<<<<<<<<<<<
* new_shape = 0
*
*/
__pyx_t_2 = ((__pyx_v_new_shape < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":881
*
* if new_shape < 0:
* new_shape = 0 # <<<<<<<<<<<<<<
*
*
*/
__pyx_v_new_shape = 0;
/* "View.MemoryView":880
* new_shape += 1
*
* if new_shape < 0: # <<<<<<<<<<<<<<
* new_shape = 0
*
*/
}
/* "View.MemoryView":884
*
*
* dst.strides[new_ndim] = stride * step # <<<<<<<<<<<<<<
* dst.shape[new_ndim] = new_shape
* dst.suboffsets[new_ndim] = suboffset
*/
(__pyx_v_dst->strides[__pyx_v_new_ndim]) = (__pyx_v_stride * __pyx_v_step);
/* "View.MemoryView":885
*
* dst.strides[new_ndim] = stride * step
* dst.shape[new_ndim] = new_shape # <<<<<<<<<<<<<<
* dst.suboffsets[new_ndim] = suboffset
*
*/
(__pyx_v_dst->shape[__pyx_v_new_ndim]) = __pyx_v_new_shape;
/* "View.MemoryView":886
* dst.strides[new_ndim] = stride * step
* dst.shape[new_ndim] = new_shape
* dst.suboffsets[new_ndim] = suboffset # <<<<<<<<<<<<<<
*
*
*/
(__pyx_v_dst->suboffsets[__pyx_v_new_ndim]) = __pyx_v_suboffset;
}
__pyx_L3:;
/* "View.MemoryView":889
*
*
* if suboffset_dim[0] < 0: # <<<<<<<<<<<<<<
* dst.data += start * stride
* else:
*/
__pyx_t_2 = (((__pyx_v_suboffset_dim[0]) < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":890
*
* if suboffset_dim[0] < 0:
* dst.data += start * stride # <<<<<<<<<<<<<<
* else:
* dst.suboffsets[suboffset_dim[0]] += start * stride
*/
__pyx_v_dst->data = (__pyx_v_dst->data + (__pyx_v_start * __pyx_v_stride));
/* "View.MemoryView":889
*
*
* if suboffset_dim[0] < 0: # <<<<<<<<<<<<<<
* dst.data += start * stride
* else:
*/
goto __pyx_L23;
}
/* "View.MemoryView":892
* dst.data += start * stride
* else:
* dst.suboffsets[suboffset_dim[0]] += start * stride # <<<<<<<<<<<<<<
*
* if suboffset >= 0:
*/
/*else*/ {
__pyx_t_3 = (__pyx_v_suboffset_dim[0]);
(__pyx_v_dst->suboffsets[__pyx_t_3]) = ((__pyx_v_dst->suboffsets[__pyx_t_3]) + (__pyx_v_start * __pyx_v_stride));
}
__pyx_L23:;
/* "View.MemoryView":894
* dst.suboffsets[suboffset_dim[0]] += start * stride
*
* if suboffset >= 0: # <<<<<<<<<<<<<<
* if not is_slice:
* if new_ndim == 0:
*/
__pyx_t_2 = ((__pyx_v_suboffset >= 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":895
*
* if suboffset >= 0:
* if not is_slice: # <<<<<<<<<<<<<<
* if new_ndim == 0:
* dst.data = (<char **> dst.data)[0] + suboffset
*/
__pyx_t_2 = ((!(__pyx_v_is_slice != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":896
* if suboffset >= 0:
* if not is_slice:
* if new_ndim == 0: # <<<<<<<<<<<<<<
* dst.data = (<char **> dst.data)[0] + suboffset
* else:
*/
__pyx_t_2 = ((__pyx_v_new_ndim == 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":897
* if not is_slice:
* if new_ndim == 0:
* dst.data = (<char **> dst.data)[0] + suboffset # <<<<<<<<<<<<<<
* else:
* _err_dim(IndexError, "All dimensions preceding dimension %d "
*/
__pyx_v_dst->data = ((((char **)__pyx_v_dst->data)[0]) + __pyx_v_suboffset);
/* "View.MemoryView":896
* if suboffset >= 0:
* if not is_slice:
* if new_ndim == 0: # <<<<<<<<<<<<<<
* dst.data = (<char **> dst.data)[0] + suboffset
* else:
*/
goto __pyx_L26;
}
/* "View.MemoryView":899
* dst.data = (<char **> dst.data)[0] + suboffset
* else:
* _err_dim(IndexError, "All dimensions preceding dimension %d " # <<<<<<<<<<<<<<
* "must be indexed and not sliced", dim)
* else:
*/
/*else*/ {
/* "View.MemoryView":900
* else:
* _err_dim(IndexError, "All dimensions preceding dimension %d "
* "must be indexed and not sliced", dim) # <<<<<<<<<<<<<<
* else:
* suboffset_dim[0] = new_ndim
*/
__pyx_t_3 = __pyx_memoryview_err_dim(__pyx_builtin_IndexError, ((char *)"All dimensions preceding dimension %d must be indexed and not sliced"), __pyx_v_dim); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(2, 899, __pyx_L1_error)
}
__pyx_L26:;
/* "View.MemoryView":895
*
* if suboffset >= 0:
* if not is_slice: # <<<<<<<<<<<<<<
* if new_ndim == 0:
* dst.data = (<char **> dst.data)[0] + suboffset
*/
goto __pyx_L25;
}
/* "View.MemoryView":902
* "must be indexed and not sliced", dim)
* else:
* suboffset_dim[0] = new_ndim # <<<<<<<<<<<<<<
*
* return 0
*/
/*else*/ {
(__pyx_v_suboffset_dim[0]) = __pyx_v_new_ndim;
}
__pyx_L25:;
/* "View.MemoryView":894
* dst.suboffsets[suboffset_dim[0]] += start * stride
*
* if suboffset >= 0: # <<<<<<<<<<<<<<
* if not is_slice:
* if new_ndim == 0:
*/
}
/* "View.MemoryView":904
* suboffset_dim[0] = new_ndim
*
* return 0 # <<<<<<<<<<<<<<
*
*
*/
__pyx_r = 0;
goto __pyx_L0;
/* "View.MemoryView":807
*
* @cname('__pyx_memoryview_slice_memviewslice')
* cdef int slice_memviewslice( # <<<<<<<<<<<<<<
* __Pyx_memviewslice *dst,
* Py_ssize_t shape, Py_ssize_t stride, Py_ssize_t suboffset,
*/
/* function exit code */
__pyx_L1_error:;
{
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_AddTraceback("View.MemoryView.slice_memviewslice", __pyx_clineno, __pyx_lineno, __pyx_filename);
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
}
__pyx_r = -1;
__pyx_L0:;
return __pyx_r;
}
/* "View.MemoryView":910
*
* @cname('__pyx_pybuffer_index')
* cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index, # <<<<<<<<<<<<<<
* Py_ssize_t dim) except NULL:
* cdef Py_ssize_t shape, stride, suboffset = -1
*/
static char *__pyx_pybuffer_index(Py_buffer *__pyx_v_view, char *__pyx_v_bufp, Py_ssize_t __pyx_v_index, Py_ssize_t __pyx_v_dim) {
Py_ssize_t __pyx_v_shape;
Py_ssize_t __pyx_v_stride;
Py_ssize_t __pyx_v_suboffset;
Py_ssize_t __pyx_v_itemsize;
char *__pyx_v_resultp;
char *__pyx_r;
__Pyx_RefNannyDeclarations
Py_ssize_t __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
__Pyx_RefNannySetupContext("pybuffer_index", 0);
/* "View.MemoryView":912
* cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index,
* Py_ssize_t dim) except NULL:
* cdef Py_ssize_t shape, stride, suboffset = -1 # <<<<<<<<<<<<<<
* cdef Py_ssize_t itemsize = view.itemsize
* cdef char *resultp
*/
__pyx_v_suboffset = -1L;
/* "View.MemoryView":913
* Py_ssize_t dim) except NULL:
* cdef Py_ssize_t shape, stride, suboffset = -1
* cdef Py_ssize_t itemsize = view.itemsize # <<<<<<<<<<<<<<
* cdef char *resultp
*
*/
__pyx_t_1 = __pyx_v_view->itemsize;
__pyx_v_itemsize = __pyx_t_1;
/* "View.MemoryView":916
* cdef char *resultp
*
* if view.ndim == 0: # <<<<<<<<<<<<<<
* shape = view.len / itemsize
* stride = itemsize
*/
__pyx_t_2 = ((__pyx_v_view->ndim == 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":917
*
* if view.ndim == 0:
* shape = view.len / itemsize # <<<<<<<<<<<<<<
* stride = itemsize
* else:
*/
if (unlikely(__pyx_v_itemsize == 0)) {
PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero");
__PYX_ERR(2, 917, __pyx_L1_error)
}
else if (sizeof(Py_ssize_t) == sizeof(long) && (!(((Py_ssize_t)-1) > 0)) && unlikely(__pyx_v_itemsize == (Py_ssize_t)-1) && unlikely(UNARY_NEG_WOULD_OVERFLOW(__pyx_v_view->len))) {
PyErr_SetString(PyExc_OverflowError, "value too large to perform division");
__PYX_ERR(2, 917, __pyx_L1_error)
}
__pyx_v_shape = __Pyx_div_Py_ssize_t(__pyx_v_view->len, __pyx_v_itemsize);
/* "View.MemoryView":918
* if view.ndim == 0:
* shape = view.len / itemsize
* stride = itemsize # <<<<<<<<<<<<<<
* else:
* shape = view.shape[dim]
*/
__pyx_v_stride = __pyx_v_itemsize;
/* "View.MemoryView":916
* cdef char *resultp
*
* if view.ndim == 0: # <<<<<<<<<<<<<<
* shape = view.len / itemsize
* stride = itemsize
*/
goto __pyx_L3;
}
/* "View.MemoryView":920
* stride = itemsize
* else:
* shape = view.shape[dim] # <<<<<<<<<<<<<<
* stride = view.strides[dim]
* if view.suboffsets != NULL:
*/
/*else*/ {
__pyx_v_shape = (__pyx_v_view->shape[__pyx_v_dim]);
/* "View.MemoryView":921
* else:
* shape = view.shape[dim]
* stride = view.strides[dim] # <<<<<<<<<<<<<<
* if view.suboffsets != NULL:
* suboffset = view.suboffsets[dim]
*/
__pyx_v_stride = (__pyx_v_view->strides[__pyx_v_dim]);
/* "View.MemoryView":922
* shape = view.shape[dim]
* stride = view.strides[dim]
* if view.suboffsets != NULL: # <<<<<<<<<<<<<<
* suboffset = view.suboffsets[dim]
*
*/
__pyx_t_2 = ((__pyx_v_view->suboffsets != NULL) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":923
* stride = view.strides[dim]
* if view.suboffsets != NULL:
* suboffset = view.suboffsets[dim] # <<<<<<<<<<<<<<
*
* if index < 0:
*/
__pyx_v_suboffset = (__pyx_v_view->suboffsets[__pyx_v_dim]);
/* "View.MemoryView":922
* shape = view.shape[dim]
* stride = view.strides[dim]
* if view.suboffsets != NULL: # <<<<<<<<<<<<<<
* suboffset = view.suboffsets[dim]
*
*/
}
}
__pyx_L3:;
/* "View.MemoryView":925
* suboffset = view.suboffsets[dim]
*
* if index < 0: # <<<<<<<<<<<<<<
* index += view.shape[dim]
* if index < 0:
*/
__pyx_t_2 = ((__pyx_v_index < 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":926
*
* if index < 0:
* index += view.shape[dim] # <<<<<<<<<<<<<<
* if index < 0:
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*/
__pyx_v_index = (__pyx_v_index + (__pyx_v_view->shape[__pyx_v_dim]));
/* "View.MemoryView":927
* if index < 0:
* index += view.shape[dim]
* if index < 0: # <<<<<<<<<<<<<<
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
*/
__pyx_t_2 = ((__pyx_v_index < 0) != 0);
if (unlikely(__pyx_t_2)) {
/* "View.MemoryView":928
* index += view.shape[dim]
* if index < 0:
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim) # <<<<<<<<<<<<<<
*
* if index >= shape:
*/
__pyx_t_3 = PyInt_FromSsize_t(__pyx_v_dim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 928, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Out_of_bounds_on_buffer_access_a, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 928, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_IndexError, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 928, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 928, __pyx_L1_error)
/* "View.MemoryView":927
* if index < 0:
* index += view.shape[dim]
* if index < 0: # <<<<<<<<<<<<<<
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
*/
}
/* "View.MemoryView":925
* suboffset = view.suboffsets[dim]
*
* if index < 0: # <<<<<<<<<<<<<<
* index += view.shape[dim]
* if index < 0:
*/
}
/* "View.MemoryView":930
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
* if index >= shape: # <<<<<<<<<<<<<<
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
*/
__pyx_t_2 = ((__pyx_v_index >= __pyx_v_shape) != 0);
if (unlikely(__pyx_t_2)) {
/* "View.MemoryView":931
*
* if index >= shape:
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim) # <<<<<<<<<<<<<<
*
* resultp = bufp + index * stride
*/
__pyx_t_3 = PyInt_FromSsize_t(__pyx_v_dim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 931, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Out_of_bounds_on_buffer_access_a, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 931, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_IndexError, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 931, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(2, 931, __pyx_L1_error)
/* "View.MemoryView":930
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
* if index >= shape: # <<<<<<<<<<<<<<
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
*/
}
/* "View.MemoryView":933
* raise IndexError("Out of bounds on buffer access (axis %d)" % dim)
*
* resultp = bufp + index * stride # <<<<<<<<<<<<<<
* if suboffset >= 0:
* resultp = (<char **> resultp)[0] + suboffset
*/
__pyx_v_resultp = (__pyx_v_bufp + (__pyx_v_index * __pyx_v_stride));
/* "View.MemoryView":934
*
* resultp = bufp + index * stride
* if suboffset >= 0: # <<<<<<<<<<<<<<
* resultp = (<char **> resultp)[0] + suboffset
*
*/
__pyx_t_2 = ((__pyx_v_suboffset >= 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":935
* resultp = bufp + index * stride
* if suboffset >= 0:
* resultp = (<char **> resultp)[0] + suboffset # <<<<<<<<<<<<<<
*
* return resultp
*/
__pyx_v_resultp = ((((char **)__pyx_v_resultp)[0]) + __pyx_v_suboffset);
/* "View.MemoryView":934
*
* resultp = bufp + index * stride
* if suboffset >= 0: # <<<<<<<<<<<<<<
* resultp = (<char **> resultp)[0] + suboffset
*
*/
}
/* "View.MemoryView":937
* resultp = (<char **> resultp)[0] + suboffset
*
* return resultp # <<<<<<<<<<<<<<
*
*
*/
__pyx_r = __pyx_v_resultp;
goto __pyx_L0;
/* "View.MemoryView":910
*
* @cname('__pyx_pybuffer_index')
* cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index, # <<<<<<<<<<<<<<
* Py_ssize_t dim) except NULL:
* cdef Py_ssize_t shape, stride, suboffset = -1
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_AddTraceback("View.MemoryView.pybuffer_index", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":943
*
* @cname('__pyx_memslice_transpose')
* cdef int transpose_memslice(__Pyx_memviewslice *memslice) nogil except 0: # <<<<<<<<<<<<<<
* cdef int ndim = memslice.memview.view.ndim
*
*/
static int __pyx_memslice_transpose(__Pyx_memviewslice *__pyx_v_memslice) {
int __pyx_v_ndim;
Py_ssize_t *__pyx_v_shape;
Py_ssize_t *__pyx_v_strides;
int __pyx_v_i;
int __pyx_v_j;
int __pyx_r;
int __pyx_t_1;
Py_ssize_t *__pyx_t_2;
long __pyx_t_3;
long __pyx_t_4;
Py_ssize_t __pyx_t_5;
Py_ssize_t __pyx_t_6;
int __pyx_t_7;
int __pyx_t_8;
int __pyx_t_9;
/* "View.MemoryView":944
* @cname('__pyx_memslice_transpose')
* cdef int transpose_memslice(__Pyx_memviewslice *memslice) nogil except 0:
* cdef int ndim = memslice.memview.view.ndim # <<<<<<<<<<<<<<
*
* cdef Py_ssize_t *shape = memslice.shape
*/
__pyx_t_1 = __pyx_v_memslice->memview->view.ndim;
__pyx_v_ndim = __pyx_t_1;
/* "View.MemoryView":946
* cdef int ndim = memslice.memview.view.ndim
*
* cdef Py_ssize_t *shape = memslice.shape # <<<<<<<<<<<<<<
* cdef Py_ssize_t *strides = memslice.strides
*
*/
__pyx_t_2 = __pyx_v_memslice->shape;
__pyx_v_shape = __pyx_t_2;
/* "View.MemoryView":947
*
* cdef Py_ssize_t *shape = memslice.shape
* cdef Py_ssize_t *strides = memslice.strides # <<<<<<<<<<<<<<
*
*
*/
__pyx_t_2 = __pyx_v_memslice->strides;
__pyx_v_strides = __pyx_t_2;
/* "View.MemoryView":951
*
* cdef int i, j
* for i in range(ndim / 2): # <<<<<<<<<<<<<<
* j = ndim - 1 - i
* strides[i], strides[j] = strides[j], strides[i]
*/
__pyx_t_3 = __Pyx_div_long(__pyx_v_ndim, 2);
__pyx_t_4 = __pyx_t_3;
for (__pyx_t_1 = 0; __pyx_t_1 < __pyx_t_4; __pyx_t_1+=1) {
__pyx_v_i = __pyx_t_1;
/* "View.MemoryView":952
* cdef int i, j
* for i in range(ndim / 2):
* j = ndim - 1 - i # <<<<<<<<<<<<<<
* strides[i], strides[j] = strides[j], strides[i]
* shape[i], shape[j] = shape[j], shape[i]
*/
__pyx_v_j = ((__pyx_v_ndim - 1) - __pyx_v_i);
/* "View.MemoryView":953
* for i in range(ndim / 2):
* j = ndim - 1 - i
* strides[i], strides[j] = strides[j], strides[i] # <<<<<<<<<<<<<<
* shape[i], shape[j] = shape[j], shape[i]
*
*/
__pyx_t_5 = (__pyx_v_strides[__pyx_v_j]);
__pyx_t_6 = (__pyx_v_strides[__pyx_v_i]);
(__pyx_v_strides[__pyx_v_i]) = __pyx_t_5;
(__pyx_v_strides[__pyx_v_j]) = __pyx_t_6;
/* "View.MemoryView":954
* j = ndim - 1 - i
* strides[i], strides[j] = strides[j], strides[i]
* shape[i], shape[j] = shape[j], shape[i] # <<<<<<<<<<<<<<
*
* if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0:
*/
__pyx_t_6 = (__pyx_v_shape[__pyx_v_j]);
__pyx_t_5 = (__pyx_v_shape[__pyx_v_i]);
(__pyx_v_shape[__pyx_v_i]) = __pyx_t_6;
(__pyx_v_shape[__pyx_v_j]) = __pyx_t_5;
/* "View.MemoryView":956
* shape[i], shape[j] = shape[j], shape[i]
*
* if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0: # <<<<<<<<<<<<<<
* _err(ValueError, "Cannot transpose memoryview with indirect dimensions")
*
*/
__pyx_t_8 = (((__pyx_v_memslice->suboffsets[__pyx_v_i]) >= 0) != 0);
if (!__pyx_t_8) {
} else {
__pyx_t_7 = __pyx_t_8;
goto __pyx_L6_bool_binop_done;
}
__pyx_t_8 = (((__pyx_v_memslice->suboffsets[__pyx_v_j]) >= 0) != 0);
__pyx_t_7 = __pyx_t_8;
__pyx_L6_bool_binop_done:;
if (__pyx_t_7) {
/* "View.MemoryView":957
*
* if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0:
* _err(ValueError, "Cannot transpose memoryview with indirect dimensions") # <<<<<<<<<<<<<<
*
* return 1
*/
__pyx_t_9 = __pyx_memoryview_err(__pyx_builtin_ValueError, ((char *)"Cannot transpose memoryview with indirect dimensions")); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(2, 957, __pyx_L1_error)
/* "View.MemoryView":956
* shape[i], shape[j] = shape[j], shape[i]
*
* if memslice.suboffsets[i] >= 0 or memslice.suboffsets[j] >= 0: # <<<<<<<<<<<<<<
* _err(ValueError, "Cannot transpose memoryview with indirect dimensions")
*
*/
}
}
/* "View.MemoryView":959
* _err(ValueError, "Cannot transpose memoryview with indirect dimensions")
*
* return 1 # <<<<<<<<<<<<<<
*
*
*/
__pyx_r = 1;
goto __pyx_L0;
/* "View.MemoryView":943
*
* @cname('__pyx_memslice_transpose')
* cdef int transpose_memslice(__Pyx_memviewslice *memslice) nogil except 0: # <<<<<<<<<<<<<<
* cdef int ndim = memslice.memview.view.ndim
*
*/
/* function exit code */
__pyx_L1_error:;
{
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_AddTraceback("View.MemoryView.transpose_memslice", __pyx_clineno, __pyx_lineno, __pyx_filename);
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
}
__pyx_r = 0;
__pyx_L0:;
return __pyx_r;
}
/* "View.MemoryView":976
* cdef int (*to_dtype_func)(char *, object) except 0
*
* def __dealloc__(self): # <<<<<<<<<<<<<<
* __PYX_XDEC_MEMVIEW(&self.from_slice, 1)
*
*/
/* Python wrapper */
static void __pyx_memoryviewslice___dealloc__(PyObject *__pyx_v_self); /*proto*/
static void __pyx_memoryviewslice___dealloc__(PyObject *__pyx_v_self) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0);
__pyx_memoryviewslice___pyx_pf_15View_dot_MemoryView_16_memoryviewslice___dealloc__(((struct __pyx_memoryviewslice_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
}
static void __pyx_memoryviewslice___pyx_pf_15View_dot_MemoryView_16_memoryviewslice___dealloc__(struct __pyx_memoryviewslice_obj *__pyx_v_self) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__dealloc__", 0);
/* "View.MemoryView":977
*
* def __dealloc__(self):
* __PYX_XDEC_MEMVIEW(&self.from_slice, 1) # <<<<<<<<<<<<<<
*
* cdef convert_item_to_object(self, char *itemp):
*/
__PYX_XDEC_MEMVIEW((&__pyx_v_self->from_slice), 1);
/* "View.MemoryView":976
* cdef int (*to_dtype_func)(char *, object) except 0
*
* def __dealloc__(self): # <<<<<<<<<<<<<<
* __PYX_XDEC_MEMVIEW(&self.from_slice, 1)
*
*/
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* "View.MemoryView":979
* __PYX_XDEC_MEMVIEW(&self.from_slice, 1)
*
* cdef convert_item_to_object(self, char *itemp): # <<<<<<<<<<<<<<
* if self.to_object_func != NULL:
* return self.to_object_func(itemp)
*/
static PyObject *__pyx_memoryviewslice_convert_item_to_object(struct __pyx_memoryviewslice_obj *__pyx_v_self, char *__pyx_v_itemp) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
__Pyx_RefNannySetupContext("convert_item_to_object", 0);
/* "View.MemoryView":980
*
* cdef convert_item_to_object(self, char *itemp):
* if self.to_object_func != NULL: # <<<<<<<<<<<<<<
* return self.to_object_func(itemp)
* else:
*/
__pyx_t_1 = ((__pyx_v_self->to_object_func != NULL) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":981
* cdef convert_item_to_object(self, char *itemp):
* if self.to_object_func != NULL:
* return self.to_object_func(itemp) # <<<<<<<<<<<<<<
* else:
* return memoryview.convert_item_to_object(self, itemp)
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __pyx_v_self->to_object_func(__pyx_v_itemp); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 981, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
/* "View.MemoryView":980
*
* cdef convert_item_to_object(self, char *itemp):
* if self.to_object_func != NULL: # <<<<<<<<<<<<<<
* return self.to_object_func(itemp)
* else:
*/
}
/* "View.MemoryView":983
* return self.to_object_func(itemp)
* else:
* return memoryview.convert_item_to_object(self, itemp) # <<<<<<<<<<<<<<
*
* cdef assign_item_from_object(self, char *itemp, object value):
*/
/*else*/ {
__Pyx_XDECREF(__pyx_r);
__pyx_t_2 = __pyx_memoryview_convert_item_to_object(((struct __pyx_memoryview_obj *)__pyx_v_self), __pyx_v_itemp); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 983, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_r = __pyx_t_2;
__pyx_t_2 = 0;
goto __pyx_L0;
}
/* "View.MemoryView":979
* __PYX_XDEC_MEMVIEW(&self.from_slice, 1)
*
* cdef convert_item_to_object(self, char *itemp): # <<<<<<<<<<<<<<
* if self.to_object_func != NULL:
* return self.to_object_func(itemp)
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_AddTraceback("View.MemoryView._memoryviewslice.convert_item_to_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":985
* return memoryview.convert_item_to_object(self, itemp)
*
* cdef assign_item_from_object(self, char *itemp, object value): # <<<<<<<<<<<<<<
* if self.to_dtype_func != NULL:
* self.to_dtype_func(itemp, value)
*/
/* Cython-generated C body of `_memoryviewslice.assign_item_from_object`
 * (View.MemoryView:985).  Writes the Python object `value` into the raw
 * item buffer `itemp`: when the slice carries a specialized converter
 * (`to_dtype_func != NULL`) that converter is used; otherwise it falls back
 * to the generic `memoryview.assign_item_from_object`.
 * Returns Py_None on success, NULL with a Python exception set on failure.
 * NOTE(review): auto-generated code -- fix the .pyx source, not this file. */
static PyObject *__pyx_memoryviewslice_assign_item_from_object(struct __pyx_memoryviewslice_obj *__pyx_v_self, char *__pyx_v_itemp, PyObject *__pyx_v_value) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("assign_item_from_object", 0);
/* "View.MemoryView":986
 *
 * cdef assign_item_from_object(self, char *itemp, object value):
 *     if self.to_dtype_func != NULL: # <<<<<<<<<<<<<<
 *         self.to_dtype_func(itemp, value)
 *     else:
 */
__pyx_t_1 = ((__pyx_v_self->to_dtype_func != NULL) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":987
 * cdef assign_item_from_object(self, char *itemp, object value):
 *     if self.to_dtype_func != NULL:
 *         self.to_dtype_func(itemp, value) # <<<<<<<<<<<<<<
 *     else:
 *         memoryview.assign_item_from_object(self, itemp, value)
 */
/* converter returns 0 on error (declared `except 0` in the .pyx source) */
__pyx_t_2 = __pyx_v_self->to_dtype_func(__pyx_v_itemp, __pyx_v_value); if (unlikely(__pyx_t_2 == ((int)0))) __PYX_ERR(2, 987, __pyx_L1_error)
/* "View.MemoryView":986
 *
 * cdef assign_item_from_object(self, char *itemp, object value):
 *     if self.to_dtype_func != NULL: # <<<<<<<<<<<<<<
 *         self.to_dtype_func(itemp, value)
 *     else:
 */
goto __pyx_L3;
}
/* "View.MemoryView":989
 *         self.to_dtype_func(itemp, value)
 *     else:
 *         memoryview.assign_item_from_object(self, itemp, value) # <<<<<<<<<<<<<<
 *
 *     @property
 */
/*else*/ {
/* generic fallback; its PyObject* result is only checked, then dropped */
__pyx_t_3 = __pyx_memoryview_assign_item_from_object(((struct __pyx_memoryview_obj *)__pyx_v_self), __pyx_v_itemp, __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 989, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
}
__pyx_L3:;
/* "View.MemoryView":985
 * return memoryview.convert_item_to_object(self, itemp)
 *
 * cdef assign_item_from_object(self, char *itemp, object value): # <<<<<<<<<<<<<<
 *     if self.to_dtype_func != NULL:
 *         self.to_dtype_func(itemp, value)
 */
/* function exit code */
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView._memoryviewslice.assign_item_from_object", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":992
*
* @property
* def base(self): # <<<<<<<<<<<<<<
* return self.from_object
*
*/
/* Python wrapper */
/* Python-visible getter wrapper for `_memoryviewslice.base` (View.MemoryView:992).
 * Casts the generic PyObject* self to the concrete slice struct and delegates
 * to the implementation function below.  Generated by Cython. */
static PyObject *__pyx_pw_15View_dot_MemoryView_16_memoryviewslice_4base_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_15View_dot_MemoryView_16_memoryviewslice_4base_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_15View_dot_MemoryView_16_memoryviewslice_4base___get__(((struct __pyx_memoryviewslice_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation of the `base` property (View.MemoryView:992-993):
 * returns a new reference to `self.from_object`, the object this
 * memoryview slice was originally created from.  Cannot fail. */
static PyObject *__pyx_pf_15View_dot_MemoryView_16_memoryviewslice_4base___get__(struct __pyx_memoryviewslice_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
/* "View.MemoryView":993
 * @property
 * def base(self):
 *     return self.from_object # <<<<<<<<<<<<<<
 *
 * __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")
 */
__Pyx_XDECREF(__pyx_r);
/* INCREF before returning: the caller owns the returned reference */
__Pyx_INCREF(__pyx_v_self->from_object);
__pyx_r = __pyx_v_self->from_object;
goto __pyx_L0;
/* "View.MemoryView":992
 *
 * @property
 * def base(self): # <<<<<<<<<<<<<<
 *     return self.from_object
 *
 */
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* Python wrapper */
/* Python-visible wrapper for `_memoryviewslice.__reduce_cython__` (pickling
 * hook).  Delegates to the implementation, which always raises TypeError. */
static PyObject *__pyx_pw___pyx_memoryviewslice_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw___pyx_memoryviewslice_1__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_memoryviewslice___reduce_cython__(((struct __pyx_memoryviewslice_obj *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* `__reduce_cython__` implementation: unconditionally raises
 * TypeError("no default __reduce__ due to non-trivial __cinit__"), i.e.
 * memoryview slices are not picklable.  Always returns NULL. */
static PyObject *__pyx_pf___pyx_memoryviewslice___reduce_cython__(CYTHON_UNUSED struct __pyx_memoryviewslice_obj *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
/* __pyx_tuple__25 holds the pre-built TypeError message argument tuple */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 2, __pyx_L1_error)
/* "(tree fragment)":1
 * def __reduce_cython__(self): # <<<<<<<<<<<<<<
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView._memoryviewslice.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
/* Python-visible wrapper for `_memoryviewslice.__setstate_cython__`
 * (unpickling hook).  Delegates to the implementation, which always raises. */
static PyObject *__pyx_pw___pyx_memoryviewslice_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw___pyx_memoryviewslice_3__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf___pyx_memoryviewslice_2__setstate_cython__(((struct __pyx_memoryviewslice_obj *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* `__setstate_cython__` implementation: unconditionally raises
 * TypeError("no default __reduce__ due to non-trivial __cinit__");
 * the `__pyx_state` argument is ignored.  Always returns NULL. */
static PyObject *__pyx_pf___pyx_memoryviewslice_2__setstate_cython__(CYTHON_UNUSED struct __pyx_memoryviewslice_obj *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
/* __pyx_tuple__26 holds the pre-built TypeError message argument tuple */
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 4, __pyx_L1_error)
/* "(tree fragment)":3
 * def __reduce_cython__(self):
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
 *     raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView._memoryviewslice.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":999
*
* @cname('__pyx_memoryview_fromslice')
* cdef memoryview_fromslice(__Pyx_memviewslice memviewslice, # <<<<<<<<<<<<<<
* int ndim,
* object (*to_object_func)(char *),
*/
/* Cython-generated body of `memoryview_fromslice` (View.MemoryView:999).
 * Wraps a C-level __Pyx_memviewslice in a new `_memoryviewslice` Python
 * object: copies the slice struct, takes a memview reference, clones the
 * source's Py_buffer and repoints buf/shape/strides/suboffsets at the copied
 * slice, recomputes view.len, and installs the dtype conversion callbacks.
 * Returns None if the slice's memview is Py_None; NULL on error.
 * NOTE(review): refcount and Py_buffer bookkeeping here is order-sensitive;
 * this is generated code -- fix the .pyx source, not this file. */
static PyObject *__pyx_memoryview_fromslice(__Pyx_memviewslice __pyx_v_memviewslice, int __pyx_v_ndim, PyObject *(*__pyx_v_to_object_func)(char *), int (*__pyx_v_to_dtype_func)(char *, PyObject *), int __pyx_v_dtype_is_object) {
struct __pyx_memoryviewslice_obj *__pyx_v_result = 0;
Py_ssize_t __pyx_v_suboffset;
PyObject *__pyx_v_length = NULL;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
__Pyx_TypeInfo *__pyx_t_4;
Py_buffer __pyx_t_5;
Py_ssize_t *__pyx_t_6;
Py_ssize_t *__pyx_t_7;
Py_ssize_t *__pyx_t_8;
Py_ssize_t __pyx_t_9;
__Pyx_RefNannySetupContext("memoryview_fromslice", 0);
/* "View.MemoryView":1007
 *     cdef _memoryviewslice result
 *
 *     if <PyObject *> memviewslice.memview == Py_None: # <<<<<<<<<<<<<<
 *         return None
 *
 */
__pyx_t_1 = ((((PyObject *)__pyx_v_memviewslice.memview) == Py_None) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":1008
 *
 *     if <PyObject *> memviewslice.memview == Py_None:
 *         return None # <<<<<<<<<<<<<<
 *
 *
 */
__Pyx_XDECREF(__pyx_r);
__pyx_r = Py_None; __Pyx_INCREF(Py_None);
goto __pyx_L0;
/* "View.MemoryView":1007
 *     cdef _memoryviewslice result
 *
 *     if <PyObject *> memviewslice.memview == Py_None: # <<<<<<<<<<<<<<
 *         return None
 *
 */
}
/* "View.MemoryView":1013
 *
 *
 *     result = _memoryviewslice(None, 0, dtype_is_object) # <<<<<<<<<<<<<<
 *
 *     result.from_slice = memviewslice
 */
/* build the 3-tuple (None, 0, dtype_is_object) and call the type */
__pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_dtype_is_object); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1013, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1013, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_INCREF(Py_None);
__Pyx_GIVEREF(Py_None);
PyTuple_SET_ITEM(__pyx_t_3, 0, Py_None);
__Pyx_INCREF(__pyx_int_0);
__Pyx_GIVEREF(__pyx_int_0);
PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_int_0);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2);
__pyx_t_2 = 0;
__pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_memoryviewslice_type), __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1013, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_v_result = ((struct __pyx_memoryviewslice_obj *)__pyx_t_2);
__pyx_t_2 = 0;
/* "View.MemoryView":1015
 *     result = _memoryviewslice(None, 0, dtype_is_object)
 *
 *     result.from_slice = memviewslice # <<<<<<<<<<<<<<
 *     __PYX_INC_MEMVIEW(&memviewslice, 1)
 *
 */
__pyx_v_result->from_slice = __pyx_v_memviewslice;
/* "View.MemoryView":1016
 *
 *     result.from_slice = memviewslice
 *     __PYX_INC_MEMVIEW(&memviewslice, 1) # <<<<<<<<<<<<<<
 *
 *     result.from_object = (<memoryview> memviewslice.memview).base
 */
__PYX_INC_MEMVIEW((&__pyx_v_memviewslice), 1);
/* "View.MemoryView":1018
 *     __PYX_INC_MEMVIEW(&memviewslice, 1)
 *
 *     result.from_object = (<memoryview> memviewslice.memview).base # <<<<<<<<<<<<<<
 *     result.typeinfo = memviewslice.memview.typeinfo
 *
 */
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_memviewslice.memview), __pyx_n_s_base); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1018, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_GIVEREF(__pyx_t_2);
__Pyx_GOTREF(__pyx_v_result->from_object);
__Pyx_DECREF(__pyx_v_result->from_object);
__pyx_v_result->from_object = __pyx_t_2;
__pyx_t_2 = 0;
/* "View.MemoryView":1019
 *
 *     result.from_object = (<memoryview> memviewslice.memview).base
 *     result.typeinfo = memviewslice.memview.typeinfo # <<<<<<<<<<<<<<
 *
 *     result.view = memviewslice.memview.view
 */
__pyx_t_4 = __pyx_v_memviewslice.memview->typeinfo;
__pyx_v_result->__pyx_base.typeinfo = __pyx_t_4;
/* "View.MemoryView":1021
 *     result.typeinfo = memviewslice.memview.typeinfo
 *
 *     result.view = memviewslice.memview.view # <<<<<<<<<<<<<<
 *     result.view.buf = <void *> memviewslice.data
 *     result.view.ndim = ndim
 */
__pyx_t_5 = __pyx_v_memviewslice.memview->view;
__pyx_v_result->__pyx_base.view = __pyx_t_5;
/* "View.MemoryView":1022
 *
 *     result.view = memviewslice.memview.view
 *     result.view.buf = <void *> memviewslice.data # <<<<<<<<<<<<<<
 *     result.view.ndim = ndim
 *     (<__pyx_buffer *> &result.view).obj = Py_None
 */
__pyx_v_result->__pyx_base.view.buf = ((void *)__pyx_v_memviewslice.data);
/* "View.MemoryView":1023
 *     result.view = memviewslice.memview.view
 *     result.view.buf = <void *> memviewslice.data
 *     result.view.ndim = ndim # <<<<<<<<<<<<<<
 *     (<__pyx_buffer *> &result.view).obj = Py_None
 *     Py_INCREF(Py_None)
 */
__pyx_v_result->__pyx_base.view.ndim = __pyx_v_ndim;
/* "View.MemoryView":1024
 *     result.view.buf = <void *> memviewslice.data
 *     result.view.ndim = ndim
 *     (<__pyx_buffer *> &result.view).obj = Py_None # <<<<<<<<<<<<<<
 *     Py_INCREF(Py_None)
 *
 */
((Py_buffer *)(&__pyx_v_result->__pyx_base.view))->obj = Py_None;
/* "View.MemoryView":1025
 *     result.view.ndim = ndim
 *     (<__pyx_buffer *> &result.view).obj = Py_None
 *     Py_INCREF(Py_None) # <<<<<<<<<<<<<<
 *
 *     if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE:
 */
/* view.obj now owns a reference to Py_None (Py_buffer ownership protocol) */
Py_INCREF(Py_None);
/* "View.MemoryView":1027
 *     Py_INCREF(Py_None)
 *
 *     if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE: # <<<<<<<<<<<<<<
 *         result.flags = PyBUF_RECORDS
 *     else:
 */
__pyx_t_1 = ((((struct __pyx_memoryview_obj *)__pyx_v_memviewslice.memview)->flags & PyBUF_WRITABLE) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":1028
 *
 *     if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE:
 *         result.flags = PyBUF_RECORDS # <<<<<<<<<<<<<<
 *     else:
 *         result.flags = PyBUF_RECORDS_RO
 */
__pyx_v_result->__pyx_base.flags = PyBUF_RECORDS;
/* "View.MemoryView":1027
 *     Py_INCREF(Py_None)
 *
 *     if (<memoryview>memviewslice.memview).flags & PyBUF_WRITABLE: # <<<<<<<<<<<<<<
 *         result.flags = PyBUF_RECORDS
 *     else:
 */
goto __pyx_L4;
}
/* "View.MemoryView":1030
 *         result.flags = PyBUF_RECORDS
 *     else:
 *         result.flags = PyBUF_RECORDS_RO # <<<<<<<<<<<<<<
 *
 *     result.view.shape = <Py_ssize_t *> result.from_slice.shape
 */
/*else*/ {
__pyx_v_result->__pyx_base.flags = PyBUF_RECORDS_RO;
}
__pyx_L4:;
/* "View.MemoryView":1032
 *         result.flags = PyBUF_RECORDS_RO
 *
 *     result.view.shape = <Py_ssize_t *> result.from_slice.shape # <<<<<<<<<<<<<<
 *     result.view.strides = <Py_ssize_t *> result.from_slice.strides
 *
 */
/* repoint the copied Py_buffer at the arrays embedded in result->from_slice,
 * so the view stays valid independently of the source memview */
__pyx_v_result->__pyx_base.view.shape = ((Py_ssize_t *)__pyx_v_result->from_slice.shape);
/* "View.MemoryView":1033
 *
 *     result.view.shape = <Py_ssize_t *> result.from_slice.shape
 *     result.view.strides = <Py_ssize_t *> result.from_slice.strides # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_v_result->__pyx_base.view.strides = ((Py_ssize_t *)__pyx_v_result->from_slice.strides);
/* "View.MemoryView":1036
 *
 *
 *     result.view.suboffsets = NULL # <<<<<<<<<<<<<<
 *     for suboffset in result.from_slice.suboffsets[:ndim]:
 *         if suboffset >= 0:
 */
__pyx_v_result->__pyx_base.view.suboffsets = NULL;
/* "View.MemoryView":1037
 *
 *     result.view.suboffsets = NULL
 *     for suboffset in result.from_slice.suboffsets[:ndim]: # <<<<<<<<<<<<<<
 *         if suboffset >= 0:
 *             result.view.suboffsets = <Py_ssize_t *> result.from_slice.suboffsets
 */
/* only expose suboffsets if at least one dimension is actually indirect */
__pyx_t_7 = (__pyx_v_result->from_slice.suboffsets + __pyx_v_ndim);
for (__pyx_t_8 = __pyx_v_result->from_slice.suboffsets; __pyx_t_8 < __pyx_t_7; __pyx_t_8++) {
__pyx_t_6 = __pyx_t_8;
__pyx_v_suboffset = (__pyx_t_6[0]);
/* "View.MemoryView":1038
 *     result.view.suboffsets = NULL
 *     for suboffset in result.from_slice.suboffsets[:ndim]:
 *         if suboffset >= 0: # <<<<<<<<<<<<<<
 *             result.view.suboffsets = <Py_ssize_t *> result.from_slice.suboffsets
 *             break
 */
__pyx_t_1 = ((__pyx_v_suboffset >= 0) != 0);
if (__pyx_t_1) {
/* "View.MemoryView":1039
 *     for suboffset in result.from_slice.suboffsets[:ndim]:
 *         if suboffset >= 0:
 *             result.view.suboffsets = <Py_ssize_t *> result.from_slice.suboffsets # <<<<<<<<<<<<<<
 *             break
 *
 */
__pyx_v_result->__pyx_base.view.suboffsets = ((Py_ssize_t *)__pyx_v_result->from_slice.suboffsets);
/* "View.MemoryView":1040
 *         if suboffset >= 0:
 *             result.view.suboffsets = <Py_ssize_t *> result.from_slice.suboffsets
 *             break # <<<<<<<<<<<<<<
 *
 *     result.view.len = result.view.itemsize
 */
goto __pyx_L6_break;
/* "View.MemoryView":1038
 *     result.view.suboffsets = NULL
 *     for suboffset in result.from_slice.suboffsets[:ndim]:
 *         if suboffset >= 0: # <<<<<<<<<<<<<<
 *             result.view.suboffsets = <Py_ssize_t *> result.from_slice.suboffsets
 *             break
 */
}
}
__pyx_L6_break:;
/* "View.MemoryView":1042
 *             break
 *
 *     result.view.len = result.view.itemsize # <<<<<<<<<<<<<<
 *     for length in result.view.shape[:ndim]:
 *         result.view.len *= length
 */
/* view.len = itemsize * product(shape[:ndim]) */
__pyx_t_9 = __pyx_v_result->__pyx_base.view.itemsize;
__pyx_v_result->__pyx_base.view.len = __pyx_t_9;
/* "View.MemoryView":1043
 *
 *     result.view.len = result.view.itemsize
 *     for length in result.view.shape[:ndim]: # <<<<<<<<<<<<<<
 *         result.view.len *= length
 *
 */
__pyx_t_7 = (__pyx_v_result->__pyx_base.view.shape + __pyx_v_ndim);
for (__pyx_t_8 = __pyx_v_result->__pyx_base.view.shape; __pyx_t_8 < __pyx_t_7; __pyx_t_8++) {
__pyx_t_6 = __pyx_t_8;
__pyx_t_2 = PyInt_FromSsize_t((__pyx_t_6[0])); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1043, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_XDECREF_SET(__pyx_v_length, __pyx_t_2);
__pyx_t_2 = 0;
/* "View.MemoryView":1044
 *     result.view.len = result.view.itemsize
 *     for length in result.view.shape[:ndim]:
 *         result.view.len *= length # <<<<<<<<<<<<<<
 *
 *     result.to_object_func = to_object_func
 */
__pyx_t_2 = PyInt_FromSsize_t(__pyx_v_result->__pyx_base.view.len); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1044, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyNumber_InPlaceMultiply(__pyx_t_2, __pyx_v_length); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1044, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_9 = __Pyx_PyIndex_AsSsize_t(__pyx_t_3); if (unlikely((__pyx_t_9 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(2, 1044, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_v_result->__pyx_base.view.len = __pyx_t_9;
}
/* "View.MemoryView":1046
 *         result.view.len *= length
 *
 *     result.to_object_func = to_object_func # <<<<<<<<<<<<<<
 *     result.to_dtype_func = to_dtype_func
 *
 */
__pyx_v_result->to_object_func = __pyx_v_to_object_func;
/* "View.MemoryView":1047
 *
 *     result.to_object_func = to_object_func
 *     result.to_dtype_func = to_dtype_func # <<<<<<<<<<<<<<
 *
 *     return result
 */
__pyx_v_result->to_dtype_func = __pyx_v_to_dtype_func;
/* "View.MemoryView":1049
 *     result.to_dtype_func = to_dtype_func
 *
 *     return result # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_get_slice_from_memoryview')
 */
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(((PyObject *)__pyx_v_result));
__pyx_r = ((PyObject *)__pyx_v_result);
goto __pyx_L0;
/* "View.MemoryView":999
 *
 * @cname('__pyx_memoryview_fromslice')
 * cdef memoryview_fromslice(__Pyx_memviewslice memviewslice, # <<<<<<<<<<<<<<
 *                           int ndim,
 *                           object (*to_object_func)(char *),
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.memoryview_fromslice", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_result);
__Pyx_XDECREF(__pyx_v_length);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":1052
*
* @cname('__pyx_memoryview_get_slice_from_memoryview')
* cdef __Pyx_memviewslice *get_slice_from_memview(memoryview memview, # <<<<<<<<<<<<<<
* __Pyx_memviewslice *mslice) except NULL:
* cdef _memoryviewslice obj
*/
/* Cython-generated body of `get_slice_from_memview` (View.MemoryView:1052).
 * If `memview` is already a `_memoryviewslice`, returns a pointer to its
 * embedded `from_slice` struct; otherwise fills the caller-provided `mslice`
 * via slice_copy and returns `mslice`.  Returns NULL with an exception set
 * on error (declared `except NULL` in the .pyx source). */
static __Pyx_memviewslice *__pyx_memoryview_get_slice_from_memoryview(struct __pyx_memoryview_obj *__pyx_v_memview, __Pyx_memviewslice *__pyx_v_mslice) {
struct __pyx_memoryviewslice_obj *__pyx_v_obj = 0;
__Pyx_memviewslice *__pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
__Pyx_RefNannySetupContext("get_slice_from_memview", 0);
/* "View.MemoryView":1055
 *                                                    __Pyx_memviewslice *mslice) except NULL:
 *     cdef _memoryviewslice obj
 *     if isinstance(memview, _memoryviewslice): # <<<<<<<<<<<<<<
 *         obj = memview
 *         return &obj.from_slice
 */
__pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1056
 *     cdef _memoryviewslice obj
 *     if isinstance(memview, _memoryviewslice):
 *         obj = memview # <<<<<<<<<<<<<<
 *         return &obj.from_slice
 *     else:
 */
/* typed assignment: re-check the cast before downcasting to the slice struct */
if (!(likely(((((PyObject *)__pyx_v_memview)) == Py_None) || likely(__Pyx_TypeTest(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type))))) __PYX_ERR(2, 1056, __pyx_L1_error)
__pyx_t_3 = ((PyObject *)__pyx_v_memview);
__Pyx_INCREF(__pyx_t_3);
__pyx_v_obj = ((struct __pyx_memoryviewslice_obj *)__pyx_t_3);
__pyx_t_3 = 0;
/* "View.MemoryView":1057
 *     if isinstance(memview, _memoryviewslice):
 *         obj = memview
 *         return &obj.from_slice # <<<<<<<<<<<<<<
 *     else:
 *         slice_copy(memview, mslice)
 */
__pyx_r = (&__pyx_v_obj->from_slice);
goto __pyx_L0;
/* "View.MemoryView":1055
 *                                                    __Pyx_memviewslice *mslice) except NULL:
 *     cdef _memoryviewslice obj
 *     if isinstance(memview, _memoryviewslice): # <<<<<<<<<<<<<<
 *         obj = memview
 *         return &obj.from_slice
 */
}
/* "View.MemoryView":1059
 *         return &obj.from_slice
 *     else:
 *         slice_copy(memview, mslice) # <<<<<<<<<<<<<<
 *         return mslice
 *
 */
/*else*/ {
__pyx_memoryview_slice_copy(__pyx_v_memview, __pyx_v_mslice);
/* "View.MemoryView":1060
 *     else:
 *         slice_copy(memview, mslice)
 *         return mslice # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_slice_copy')
 */
__pyx_r = __pyx_v_mslice;
goto __pyx_L0;
}
/* "View.MemoryView":1052
 *
 * @cname('__pyx_memoryview_get_slice_from_memoryview')
 * cdef __Pyx_memviewslice *get_slice_from_memview(memoryview memview, # <<<<<<<<<<<<<<
 *                                                    __Pyx_memviewslice *mslice) except NULL:
 *     cdef _memoryviewslice obj
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_3);
__Pyx_AddTraceback("View.MemoryView.get_slice_from_memview", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF((PyObject *)__pyx_v_obj);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":1063
*
* @cname('__pyx_memoryview_slice_copy')
* cdef void slice_copy(memoryview memview, __Pyx_memviewslice *dst): # <<<<<<<<<<<<<<
* cdef int dim
* cdef (Py_ssize_t*) shape, strides, suboffsets
*/
/* Cython-generated body of `slice_copy` (View.MemoryView:1063).
 * Fills `dst` from the memoryview's Py_buffer: stores the memview pointer
 * and data pointer, then copies shape/strides per dimension; a suboffset of
 * -1 is substituted for every dimension when the view has no suboffsets
 * array.  Borrows the memview pointer -- no reference counting is done. */
static void __pyx_memoryview_slice_copy(struct __pyx_memoryview_obj *__pyx_v_memview, __Pyx_memviewslice *__pyx_v_dst) {
int __pyx_v_dim;
Py_ssize_t *__pyx_v_shape;
Py_ssize_t *__pyx_v_strides;
Py_ssize_t *__pyx_v_suboffsets;
__Pyx_RefNannyDeclarations
Py_ssize_t *__pyx_t_1;
int __pyx_t_2;
int __pyx_t_3;
int __pyx_t_4;
Py_ssize_t __pyx_t_5;
__Pyx_RefNannySetupContext("slice_copy", 0);
/* "View.MemoryView":1067
 *     cdef (Py_ssize_t*) shape, strides, suboffsets
 *
 *     shape = memview.view.shape # <<<<<<<<<<<<<<
 *     strides = memview.view.strides
 *     suboffsets = memview.view.suboffsets
 */
__pyx_t_1 = __pyx_v_memview->view.shape;
__pyx_v_shape = __pyx_t_1;
/* "View.MemoryView":1068
 *
 *     shape = memview.view.shape
 *     strides = memview.view.strides # <<<<<<<<<<<<<<
 *     suboffsets = memview.view.suboffsets
 *
 */
__pyx_t_1 = __pyx_v_memview->view.strides;
__pyx_v_strides = __pyx_t_1;
/* "View.MemoryView":1069
 *     shape = memview.view.shape
 *     strides = memview.view.strides
 *     suboffsets = memview.view.suboffsets # <<<<<<<<<<<<<<
 *
 *     dst.memview = <__pyx_memoryview *> memview
 */
/* may be NULL: Py_buffer.suboffsets is optional (PEP 3118) */
__pyx_t_1 = __pyx_v_memview->view.suboffsets;
__pyx_v_suboffsets = __pyx_t_1;
/* "View.MemoryView":1071
 *     suboffsets = memview.view.suboffsets
 *
 *     dst.memview = <__pyx_memoryview *> memview # <<<<<<<<<<<<<<
 *     dst.data = <char *> memview.view.buf
 *
 */
__pyx_v_dst->memview = ((struct __pyx_memoryview_obj *)__pyx_v_memview);
/* "View.MemoryView":1072
 *
 *     dst.memview = <__pyx_memoryview *> memview
 *     dst.data = <char *> memview.view.buf # <<<<<<<<<<<<<<
 *
 *     for dim in range(memview.view.ndim):
 */
__pyx_v_dst->data = ((char *)__pyx_v_memview->view.buf);
/* "View.MemoryView":1074
 *     dst.data = <char *> memview.view.buf
 *
 *     for dim in range(memview.view.ndim): # <<<<<<<<<<<<<<
 *         dst.shape[dim] = shape[dim]
 *         dst.strides[dim] = strides[dim]
 */
__pyx_t_2 = __pyx_v_memview->view.ndim;
__pyx_t_3 = __pyx_t_2;
for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {
__pyx_v_dim = __pyx_t_4;
/* "View.MemoryView":1075
 *
 *     for dim in range(memview.view.ndim):
 *         dst.shape[dim] = shape[dim] # <<<<<<<<<<<<<<
 *         dst.strides[dim] = strides[dim]
 *         dst.suboffsets[dim] = suboffsets[dim] if suboffsets else -1
 */
(__pyx_v_dst->shape[__pyx_v_dim]) = (__pyx_v_shape[__pyx_v_dim]);
/* "View.MemoryView":1076
 *     for dim in range(memview.view.ndim):
 *         dst.shape[dim] = shape[dim]
 *         dst.strides[dim] = strides[dim] # <<<<<<<<<<<<<<
 *         dst.suboffsets[dim] = suboffsets[dim] if suboffsets else -1
 *
 */
(__pyx_v_dst->strides[__pyx_v_dim]) = (__pyx_v_strides[__pyx_v_dim]);
/* "View.MemoryView":1077
 *         dst.shape[dim] = shape[dim]
 *         dst.strides[dim] = strides[dim]
 *         dst.suboffsets[dim] = suboffsets[dim] if suboffsets else -1 # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_copy_object')
 */
if ((__pyx_v_suboffsets != 0)) {
__pyx_t_5 = (__pyx_v_suboffsets[__pyx_v_dim]);
} else {
__pyx_t_5 = -1L;
}
(__pyx_v_dst->suboffsets[__pyx_v_dim]) = __pyx_t_5;
}
/* "View.MemoryView":1063
 *
 * @cname('__pyx_memoryview_slice_copy')
 * cdef void slice_copy(memoryview memview, __Pyx_memviewslice *dst): # <<<<<<<<<<<<<<
 *     cdef int dim
 *     cdef (Py_ssize_t*) shape, strides, suboffsets
 */
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* "View.MemoryView":1080
*
* @cname('__pyx_memoryview_copy_object')
* cdef memoryview_copy(memoryview memview): # <<<<<<<<<<<<<<
* "Create a new memoryview object"
* cdef __Pyx_memviewslice memviewslice
*/
/* Cython-generated body of `memoryview_copy` (View.MemoryView:1080):
 * "Create a new memoryview object".  Snapshots the memview into a local
 * __Pyx_memviewslice via slice_copy, then delegates to
 * memoryview_copy_from_slice.  Returns the new object or NULL on error. */
static PyObject *__pyx_memoryview_copy_object(struct __pyx_memoryview_obj *__pyx_v_memview) {
__Pyx_memviewslice __pyx_v_memviewslice;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
__Pyx_RefNannySetupContext("memoryview_copy", 0);
/* "View.MemoryView":1083
 *     "Create a new memoryview object"
 *     cdef __Pyx_memviewslice memviewslice
 *     slice_copy(memview, &memviewslice) # <<<<<<<<<<<<<<
 *     return memoryview_copy_from_slice(memview, &memviewslice)
 *
 */
__pyx_memoryview_slice_copy(__pyx_v_memview, (&__pyx_v_memviewslice));
/* "View.MemoryView":1084
 *     cdef __Pyx_memviewslice memviewslice
 *     slice_copy(memview, &memviewslice)
 *     return memoryview_copy_from_slice(memview, &memviewslice) # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_copy_object_from_slice')
 */
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __pyx_memoryview_copy_object_from_slice(__pyx_v_memview, (&__pyx_v_memviewslice)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1084, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "View.MemoryView":1080
 *
 * @cname('__pyx_memoryview_copy_object')
 * cdef memoryview_copy(memoryview memview): # <<<<<<<<<<<<<<
 *     "Create a new memoryview object"
 *     cdef __Pyx_memviewslice memviewslice
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("View.MemoryView.memoryview_copy", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":1087
*
* @cname('__pyx_memoryview_copy_object_from_slice')
* cdef memoryview_copy_from_slice(memoryview memview, __Pyx_memviewslice *memviewslice): # <<<<<<<<<<<<<<
* """
* Create a new memoryview object from a given memoryview object and slice.
*/
/* Cython-generated body of `memoryview_copy_from_slice` (View.MemoryView:1087):
 * "Create a new memoryview object from a given memoryview object and slice."
 * Propagates the dtype conversion callbacks when the source is already a
 * `_memoryviewslice` (NULL otherwise), then builds the new object through
 * memoryview_fromslice.  Returns the new object or NULL on error. */
static PyObject *__pyx_memoryview_copy_object_from_slice(struct __pyx_memoryview_obj *__pyx_v_memview, __Pyx_memviewslice *__pyx_v_memviewslice) {
PyObject *(*__pyx_v_to_object_func)(char *);
int (*__pyx_v_to_dtype_func)(char *, PyObject *);
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
PyObject *(*__pyx_t_3)(char *);
int (*__pyx_t_4)(char *, PyObject *);
PyObject *__pyx_t_5 = NULL;
__Pyx_RefNannySetupContext("memoryview_copy_from_slice", 0);
/* "View.MemoryView":1094
 *     cdef int (*to_dtype_func)(char *, object) except 0
 *
 *     if isinstance(memview, _memoryviewslice): # <<<<<<<<<<<<<<
 *         to_object_func = (<_memoryviewslice> memview).to_object_func
 *         to_dtype_func = (<_memoryviewslice> memview).to_dtype_func
 */
__pyx_t_1 = __Pyx_TypeCheck(((PyObject *)__pyx_v_memview), __pyx_memoryviewslice_type);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1095
 *
 *     if isinstance(memview, _memoryviewslice):
 *         to_object_func = (<_memoryviewslice> memview).to_object_func # <<<<<<<<<<<<<<
 *         to_dtype_func = (<_memoryviewslice> memview).to_dtype_func
 *     else:
 */
__pyx_t_3 = ((struct __pyx_memoryviewslice_obj *)__pyx_v_memview)->to_object_func;
__pyx_v_to_object_func = __pyx_t_3;
/* "View.MemoryView":1096
 *     if isinstance(memview, _memoryviewslice):
 *         to_object_func = (<_memoryviewslice> memview).to_object_func
 *         to_dtype_func = (<_memoryviewslice> memview).to_dtype_func # <<<<<<<<<<<<<<
 *     else:
 *         to_object_func = NULL
 */
__pyx_t_4 = ((struct __pyx_memoryviewslice_obj *)__pyx_v_memview)->to_dtype_func;
__pyx_v_to_dtype_func = __pyx_t_4;
/* "View.MemoryView":1094
 *     cdef int (*to_dtype_func)(char *, object) except 0
 *
 *     if isinstance(memview, _memoryviewslice): # <<<<<<<<<<<<<<
 *         to_object_func = (<_memoryviewslice> memview).to_object_func
 *         to_dtype_func = (<_memoryviewslice> memview).to_dtype_func
 */
goto __pyx_L3;
}
/* "View.MemoryView":1098
 *         to_dtype_func = (<_memoryviewslice> memview).to_dtype_func
 *     else:
 *         to_object_func = NULL # <<<<<<<<<<<<<<
 *         to_dtype_func = NULL
 *
 */
/*else*/ {
__pyx_v_to_object_func = NULL;
/* "View.MemoryView":1099
 *     else:
 *         to_object_func = NULL
 *         to_dtype_func = NULL # <<<<<<<<<<<<<<
 *
 *     return memoryview_fromslice(memviewslice[0], memview.view.ndim,
 */
__pyx_v_to_dtype_func = NULL;
}
__pyx_L3:;
/* "View.MemoryView":1101
 *         to_dtype_func = NULL
 *
 *     return memoryview_fromslice(memviewslice[0], memview.view.ndim, # <<<<<<<<<<<<<<
 *                                 to_object_func, to_dtype_func,
 *                                 memview.dtype_is_object)
 */
__Pyx_XDECREF(__pyx_r);
/* "View.MemoryView":1103
 *     return memoryview_fromslice(memviewslice[0], memview.view.ndim,
 *                                 to_object_func, to_dtype_func,
 *                                 memview.dtype_is_object) # <<<<<<<<<<<<<<
 *
 *
 */
/* slice struct is passed by value; fromslice takes its own memview reference */
__pyx_t_5 = __pyx_memoryview_fromslice((__pyx_v_memviewslice[0]), __pyx_v_memview->view.ndim, __pyx_v_to_object_func, __pyx_v_to_dtype_func, __pyx_v_memview->dtype_is_object); if (unlikely(!__pyx_t_5)) __PYX_ERR(2, 1101, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__pyx_r = __pyx_t_5;
__pyx_t_5 = 0;
goto __pyx_L0;
/* "View.MemoryView":1087
 *
 * @cname('__pyx_memoryview_copy_object_from_slice')
 * cdef memoryview_copy_from_slice(memoryview memview, __Pyx_memviewslice *memviewslice): # <<<<<<<<<<<<<<
 *     """
 *     Create a new memoryview object from a given memoryview object and slice.
 */
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("View.MemoryView.memoryview_copy_from_slice", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = 0;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "View.MemoryView":1109
*
*
* cdef Py_ssize_t abs_py_ssize_t(Py_ssize_t arg) nogil: # <<<<<<<<<<<<<<
* if arg < 0:
* return -arg
*/
/* Cython-generated helper (View.MemoryView:1109): return the absolute value
 * of a Py_ssize_t.  Declared `nogil` in the .pyx source -- touches no Python
 * state, so it is safe to call without the GIL.  Behaviorally identical to
 * the generated if/else form, condensed to a single conditional expression. */
static Py_ssize_t abs_py_ssize_t(Py_ssize_t __pyx_v_arg) {
  /* negate only when negative; otherwise pass the argument through */
  Py_ssize_t __pyx_v_result = (__pyx_v_arg < 0) ? (-__pyx_v_arg) : __pyx_v_arg;
  return __pyx_v_result;
}
/* "View.MemoryView":1116
*
* @cname('__pyx_get_best_slice_order')
* cdef char get_best_order(__Pyx_memviewslice *mslice, int ndim) nogil: # <<<<<<<<<<<<<<
* """
* Figure out the best memory access order for a given slice.
*/
/* get_best_order (cname '__pyx_get_best_slice_order') — generated from
 * Cython source "View.MemoryView":1116.
 *
 * Pick the better traversal order for a memoryview slice: compare the
 * stride of the last axis with extent > 1 (the C-order "fast" axis)
 * against the stride of the first axis with extent > 1 (the F-order
 * "fast" axis) and return 'C' or 'F' accordingly. Ties favour 'C',
 * matching the original generated code. */
static char __pyx_get_best_slice_order(__Pyx_memviewslice *__pyx_v_mslice, int __pyx_v_ndim) {
  Py_ssize_t c_stride = 0;  /* stride of last axis with shape[axis] > 1 */
  Py_ssize_t f_stride = 0;  /* stride of first axis with shape[axis] > 1 */
  int axis;

  for (axis = __pyx_v_ndim - 1; axis >= 0; axis--) {
    if (__pyx_v_mslice->shape[axis] > 1) {
      c_stride = __pyx_v_mslice->strides[axis];
      break;
    }
  }

  for (axis = 0; axis < __pyx_v_ndim; axis++) {
    if (__pyx_v_mslice->shape[axis] > 1) {
      f_stride = __pyx_v_mslice->strides[axis];
      break;
    }
  }

  /* Smaller (or equal) C-axis stride magnitude means C order is at least
   * as cache-friendly as Fortran order. */
  return (abs_py_ssize_t(c_stride) <= abs_py_ssize_t(f_stride)) ? 'C' : 'F';
}
/* "View.MemoryView":1140
*
* @cython.cdivision(True)
* cdef void _copy_strided_to_strided(char *src_data, Py_ssize_t *src_strides, # <<<<<<<<<<<<<<
* char *dst_data, Py_ssize_t *dst_strides,
* Py_ssize_t *src_shape, Py_ssize_t *dst_shape,
*/
/* _copy_strided_to_strided — generated from Cython source "View.MemoryView":1140.
 *
 * Recursively copy an ndim-dimensional strided array from src to dst.
 * The dst shape drives the iteration counts (shapes are assumed already
 * verified/broadcast by the caller). At the innermost dimension a single
 * bulk memcpy is used when both strides equal the itemsize (both rows
 * contiguous); otherwise items are copied one at a time. */
static void _copy_strided_to_strided(char *__pyx_v_src_data, Py_ssize_t *__pyx_v_src_strides, char *__pyx_v_dst_data, Py_ssize_t *__pyx_v_dst_strides, Py_ssize_t *__pyx_v_src_shape, Py_ssize_t *__pyx_v_dst_shape, int __pyx_v_ndim, size_t __pyx_v_itemsize) {
  Py_ssize_t extent = __pyx_v_dst_shape[0];
  Py_ssize_t src_stride = __pyx_v_src_strides[0];
  Py_ssize_t dst_stride = __pyx_v_dst_strides[0];
  Py_ssize_t k;

  if (__pyx_v_ndim == 1) {
    if (src_stride > 0 && dst_stride > 0 &&
        ((size_t)src_stride) == __pyx_v_itemsize &&
        ((size_t)dst_stride) == __pyx_v_itemsize) {
      /* Both sides are packed: one bulk copy covers the whole row. */
      memcpy(__pyx_v_dst_data, __pyx_v_src_data, __pyx_v_itemsize * extent);
    } else {
      /* Strided (or negative-stride) row: copy item by item. */
      for (k = 0; k < extent; k++) {
        memcpy(__pyx_v_dst_data, __pyx_v_src_data, __pyx_v_itemsize);
        __pyx_v_src_data += src_stride;
        __pyx_v_dst_data += dst_stride;
      }
    }
  } else {
    /* Recurse over the leading dimension, advancing both base pointers. */
    for (k = 0; k < extent; k++) {
      _copy_strided_to_strided(__pyx_v_src_data, __pyx_v_src_strides + 1,
                               __pyx_v_dst_data, __pyx_v_dst_strides + 1,
                               __pyx_v_src_shape + 1, __pyx_v_dst_shape + 1,
                               __pyx_v_ndim - 1, __pyx_v_itemsize);
      __pyx_v_src_data += src_stride;
      __pyx_v_dst_data += dst_stride;
    }
  }
}
/* "View.MemoryView":1170
* dst_data += dst_stride
*
* cdef void copy_strided_to_strided(__Pyx_memviewslice *src, # <<<<<<<<<<<<<<
* __Pyx_memviewslice *dst,
* int ndim, size_t itemsize) nogil:
*/
/* copy_strided_to_strided — generated from Cython source "View.MemoryView":1170.
 * Public entry point: unpack the two slice structs and hand off to the
 * recursive _copy_strided_to_strided worker. */
static void copy_strided_to_strided(__Pyx_memviewslice *__pyx_v_src, __Pyx_memviewslice *__pyx_v_dst, int __pyx_v_ndim, size_t __pyx_v_itemsize) {
  _copy_strided_to_strided(__pyx_v_src->data, __pyx_v_src->strides,
                           __pyx_v_dst->data, __pyx_v_dst->strides,
                           __pyx_v_src->shape, __pyx_v_dst->shape,
                           __pyx_v_ndim, __pyx_v_itemsize);
}
/* "View.MemoryView":1177
*
* @cname('__pyx_memoryview_slice_get_size')
* cdef Py_ssize_t slice_get_size(__Pyx_memviewslice *src, int ndim) nogil: # <<<<<<<<<<<<<<
* "Return the size of the memory occupied by the slice in number of bytes"
* cdef Py_ssize_t shape, size = src.memview.view.itemsize
*/
/* slice_get_size (cname '__pyx_memoryview_slice_get_size') — generated
 * from Cython source "View.MemoryView":1177.
 * Return the number of bytes the slice occupies:
 * itemsize * product(shape[0:ndim]). */
static Py_ssize_t __pyx_memoryview_slice_get_size(__Pyx_memviewslice *__pyx_v_src, int __pyx_v_ndim) {
  Py_ssize_t size = __pyx_v_src->memview->view.itemsize;
  int dim;

  for (dim = 0; dim < __pyx_v_ndim; dim++) {
    size *= __pyx_v_src->shape[dim];
  }
  return size;
}
/* "View.MemoryView":1187
*
* @cname('__pyx_fill_contig_strides_array')
* cdef Py_ssize_t fill_contig_strides_array( # <<<<<<<<<<<<<<
* Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t stride,
* int ndim, char order) nogil:
*/
/* fill_contig_strides_array (cname '__pyx_fill_contig_strides_array') —
 * generated from Cython source "View.MemoryView":1187.
 *
 * Fill `strides` for a contiguous array of the given shape, starting
 * from `stride` (normally the itemsize). 'F' order grows strides from
 * the first axis outward; any other order (i.e. 'C') from the last axis
 * backward. Returns the accumulated stride, which equals
 * stride * product(shape) — the total size in the initial stride's units. */
static Py_ssize_t __pyx_fill_contig_strides_array(Py_ssize_t *__pyx_v_shape, Py_ssize_t *__pyx_v_strides, Py_ssize_t __pyx_v_stride, int __pyx_v_ndim, char __pyx_v_order) {
  int axis;

  if (__pyx_v_order == 'F') {
    /* Fortran order: first axis varies fastest. */
    for (axis = 0; axis < __pyx_v_ndim; axis++) {
      __pyx_v_strides[axis] = __pyx_v_stride;
      __pyx_v_stride *= __pyx_v_shape[axis];
    }
  } else {
    /* C order: last axis varies fastest. */
    for (axis = __pyx_v_ndim - 1; axis >= 0; axis--) {
      __pyx_v_strides[axis] = __pyx_v_stride;
      __pyx_v_stride *= __pyx_v_shape[axis];
    }
  }
  return __pyx_v_stride;
}
/* "View.MemoryView":1208
*
* @cname('__pyx_memoryview_copy_data_to_temp')
* cdef void *copy_data_to_temp(__Pyx_memviewslice *src, # <<<<<<<<<<<<<<
* __Pyx_memviewslice *tmpslice,
* char order,
*/
/* copy_data_to_temp (cname '__pyx_memoryview_copy_data_to_temp') —
 * generated from Cython source "View.MemoryView":1208.
 *
 * malloc() a buffer big enough for `src`, describe it in *tmpslice as a
 * contiguous array of the requested `order` ('C' or 'F'), and copy src's
 * data into it. Returns the malloc'd pointer on success; on allocation
 * failure raises MemoryError (via _err, which acquires the GIL itself)
 * and returns NULL after recording a traceback. The caller owns the
 * returned buffer. */
static void *__pyx_memoryview_copy_data_to_temp(__Pyx_memviewslice *__pyx_v_src, __Pyx_memviewslice *__pyx_v_tmpslice, char __pyx_v_order, int __pyx_v_ndim) {
  void *buf;
  int dim;
  size_t itemsize = __pyx_v_src->memview->view.itemsize;
  size_t nbytes = __pyx_memoryview_slice_get_size(__pyx_v_src, __pyx_v_ndim);

  buf = malloc(nbytes);
  if (!buf) {
    /* _err raises MemoryError with the GIL held and always returns -1. */
    if (unlikely(__pyx_memoryview_err(__pyx_builtin_MemoryError, NULL) == ((int)-1))) __PYX_ERR(2, 1224, __pyx_L1_error)
  }

  /* Describe the temporary buffer in *tmpslice: same shape as src, no
   * suboffsets, strides filled for a contiguous layout of `order`. */
  __pyx_v_tmpslice->data = ((char *)buf);
  __pyx_v_tmpslice->memview = __pyx_v_src->memview;
  for (dim = 0; dim < __pyx_v_ndim; dim++) {
    __pyx_v_tmpslice->shape[dim] = __pyx_v_src->shape[dim];
    __pyx_v_tmpslice->suboffsets[dim] = -1;
  }
  (void)(__pyx_fill_contig_strides_array((&(__pyx_v_tmpslice->shape[0])), (&(__pyx_v_tmpslice->strides[0])), itemsize, __pyx_v_ndim, __pyx_v_order));

  /* Zero the stride of every length-1 axis (broadcasting convention). */
  for (dim = 0; dim < __pyx_v_ndim; dim++) {
    if (__pyx_v_tmpslice->shape[dim] == 1) {
      __pyx_v_tmpslice->strides[dim] = 0;
    }
  }

  if (__pyx_memviewslice_is_contig((__pyx_v_src[0]), __pyx_v_order, __pyx_v_ndim)) {
    /* src already contiguous in the requested order: one bulk copy. */
    memcpy(buf, __pyx_v_src->data, nbytes);
  } else {
    copy_strided_to_strided(__pyx_v_src, __pyx_v_tmpslice, __pyx_v_ndim, itemsize);
  }
  return buf;

  /* Error exit: re-acquire the GIL to record the traceback, since this
   * function runs nogil. */
  __pyx_L1_error:;
  {
  #ifdef WITH_THREAD
  PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
  #endif
  __Pyx_AddTraceback("View.MemoryView.copy_data_to_temp", __pyx_clineno, __pyx_lineno, __pyx_filename);
  #ifdef WITH_THREAD
  __Pyx_PyGILState_Release(__pyx_gilstate_save);
  #endif
  }
  return NULL;
}
/* "View.MemoryView":1251
*
* @cname('__pyx_memoryview_err_extents')
* cdef int _err_extents(int i, Py_ssize_t extent1, # <<<<<<<<<<<<<<
* Py_ssize_t extent2) except -1 with gil:
* raise ValueError("got differing extents in dimension %d (got %d and %d)" %
*/
/* _err_extents (cname '__pyx_memoryview_err_extents') — generated from
 * Cython source "View.MemoryView":1251. Declared `except -1 with gil`:
 * acquires the GIL, always raises ValueError describing the mismatched
 * extents in dimension `i`, and returns -1 to signal the exception. */
static int __pyx_memoryview_err_extents(int __pyx_v_i, Py_ssize_t __pyx_v_extent1, Py_ssize_t __pyx_v_extent2) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
/* `with gil`: make sure this thread holds the GIL before touching Python objects. */
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_RefNannySetupContext("_err_extents", 0);
/* "View.MemoryView":1254
 * Py_ssize_t extent2) except -1 with gil:
 * raise ValueError("got differing extents in dimension %d (got %d and %d)" %
 * (i, extent1, extent2)) # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_err_dim')
 */
/* Build the (i, extent1, extent2) argument tuple for %-formatting. */
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_i); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1254, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = PyInt_FromSsize_t(__pyx_v_extent1); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1254, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = PyInt_FromSsize_t(__pyx_v_extent2); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1254, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 1254, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
/* PyTuple_SET_ITEM steals each reference, hence the GIVEREFs. */
__Pyx_GIVEREF(__pyx_t_1);
PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1);
__Pyx_GIVEREF(__pyx_t_2);
PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_2);
__Pyx_GIVEREF(__pyx_t_3);
PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3);
__pyx_t_1 = 0;
__pyx_t_2 = 0;
__pyx_t_3 = 0;
/* "View.MemoryView":1253
 * cdef int _err_extents(int i, Py_ssize_t extent1,
 * Py_ssize_t extent2) except -1 with gil:
 * raise ValueError("got differing extents in dimension %d (got %d and %d)" % # <<<<<<<<<<<<<<
 * (i, extent1, extent2))
 *
 */
/* Format the message, build the ValueError, and raise it. */
__pyx_t_3 = __Pyx_PyString_Format(__pyx_kp_s_got_differing_extents_in_dimensi, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1253, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 1253, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_Raise(__pyx_t_4, 0, 0, 0);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__PYX_ERR(2, 1253, __pyx_L1_error)
/* "View.MemoryView":1251
 *
 * @cname('__pyx_memoryview_err_extents')
 * cdef int _err_extents(int i, Py_ssize_t extent1, # <<<<<<<<<<<<<<
 * Py_ssize_t extent2) except -1 with gil:
 * raise ValueError("got differing extents in dimension %d (got %d and %d)" %
 */
/* function exit code */
/* Always reached via the raise above: clean up temporaries, record the
 * traceback, return -1, and release the GIL acquired on entry. */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_AddTraceback("View.MemoryView._err_extents", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__Pyx_RefNannyFinishContext();
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
return __pyx_r;
}
/* "View.MemoryView":1257
*
* @cname('__pyx_memoryview_err_dim')
* cdef int _err_dim(object error, char *msg, int dim) except -1 with gil: # <<<<<<<<<<<<<<
* raise error(msg.decode('ascii') % dim)
*
*/
/* _err_dim (cname '__pyx_memoryview_err_dim') — generated from Cython
 * source "View.MemoryView":1257. Declared `except -1 with gil`: acquires
 * the GIL, decodes `msg` as ASCII, %-formats it with `dim`, raises
 * `error(formatted)`, and returns -1 to signal the exception. */
static int __pyx_memoryview_err_dim(PyObject *__pyx_v_error, char *__pyx_v_msg, int __pyx_v_dim) {
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
/* `with gil`: make sure this thread holds the GIL before touching Python objects. */
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_RefNannySetupContext("_err_dim", 0);
__Pyx_INCREF(__pyx_v_error);
/* "View.MemoryView":1258
 * @cname('__pyx_memoryview_err_dim')
 * cdef int _err_dim(object error, char *msg, int dim) except -1 with gil:
 * raise error(msg.decode('ascii') % dim) # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_err')
 */
/* msg.decode('ascii') % dim */
__pyx_t_2 = __Pyx_decode_c_string(__pyx_v_msg, 0, strlen(__pyx_v_msg), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1258, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_dim); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1258, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_4 = PyUnicode_Format(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 1258, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_INCREF(__pyx_v_error);
__pyx_t_3 = __pyx_v_error; __pyx_t_2 = NULL;
/* If `error` is a bound method, unpack it so self can be passed explicitly. */
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
__pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3);
if (likely(__pyx_t_2)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
__Pyx_INCREF(__pyx_t_2);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_3, function);
}
}
/* Call error(formatted_message) and raise the result. */
__pyx_t_1 = (__pyx_t_2) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_2, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4);
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1258, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(2, 1258, __pyx_L1_error)
/* "View.MemoryView":1257
 *
 * @cname('__pyx_memoryview_err_dim')
 * cdef int _err_dim(object error, char *msg, int dim) except -1 with gil: # <<<<<<<<<<<<<<
 * raise error(msg.decode('ascii') % dim)
 *
 */
/* function exit code */
/* Always reached via the raise above: clean up, record traceback,
 * return -1, release the GIL acquired on entry. */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_AddTraceback("View.MemoryView._err_dim", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__Pyx_XDECREF(__pyx_v_error);
__Pyx_RefNannyFinishContext();
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
return __pyx_r;
}
/* "View.MemoryView":1261
*
* @cname('__pyx_memoryview_err')
* cdef int _err(object error, char *msg) except -1 with gil: # <<<<<<<<<<<<<<
* if msg != NULL:
* raise error(msg.decode('ascii'))
*/
/* _err (cname '__pyx_memoryview_err') — generated from Cython source
 * "View.MemoryView":1261. Declared `except -1 with gil`: acquires the
 * GIL and raises `error(msg.decode('ascii'))` when msg is non-NULL,
 * otherwise re-raises `error` bare; always returns -1. */
static int __pyx_memoryview_err(PyObject *__pyx_v_error, char *__pyx_v_msg) {
int __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
/* `with gil`: make sure this thread holds the GIL before touching Python objects. */
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_RefNannySetupContext("_err", 0);
__Pyx_INCREF(__pyx_v_error);
/* "View.MemoryView":1262
 * @cname('__pyx_memoryview_err')
 * cdef int _err(object error, char *msg) except -1 with gil:
 * if msg != NULL: # <<<<<<<<<<<<<<
 * raise error(msg.decode('ascii'))
 * else:
 */
__pyx_t_1 = ((__pyx_v_msg != NULL) != 0);
if (unlikely(__pyx_t_1)) {
/* "View.MemoryView":1263
 * cdef int _err(object error, char *msg) except -1 with gil:
 * if msg != NULL:
 * raise error(msg.decode('ascii')) # <<<<<<<<<<<<<<
 * else:
 * raise error
 */
/* Decode the C message as ASCII and raise error(message). */
__pyx_t_3 = __Pyx_decode_c_string(__pyx_v_msg, 0, strlen(__pyx_v_msg), NULL, NULL, PyUnicode_DecodeASCII); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 1263, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_INCREF(__pyx_v_error);
__pyx_t_4 = __pyx_v_error; __pyx_t_5 = NULL;
/* If `error` is a bound method, unpack it so self can be passed explicitly. */
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
__Pyx_INCREF(__pyx_t_5);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_4, function);
}
}
__pyx_t_2 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_3);
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 1263, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_Raise(__pyx_t_2, 0, 0, 0);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__PYX_ERR(2, 1263, __pyx_L1_error)
/* "View.MemoryView":1262
 * @cname('__pyx_memoryview_err')
 * cdef int _err(object error, char *msg) except -1 with gil:
 * if msg != NULL: # <<<<<<<<<<<<<<
 * raise error(msg.decode('ascii'))
 * else:
 */
}
/* "View.MemoryView":1265
 * raise error(msg.decode('ascii'))
 * else:
 * raise error # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_copy_contents')
 */
/* No message: raise the error object (class or instance) as-is. */
/*else*/ {
__Pyx_Raise(__pyx_v_error, 0, 0, 0);
__PYX_ERR(2, 1265, __pyx_L1_error)
}
/* "View.MemoryView":1261
 *
 * @cname('__pyx_memoryview_err')
 * cdef int _err(object error, char *msg) except -1 with gil: # <<<<<<<<<<<<<<
 * if msg != NULL:
 * raise error(msg.decode('ascii'))
 */
/* function exit code */
/* Always reached via one of the raises above: clean up, record the
 * traceback, return -1, release the GIL acquired on entry. */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("View.MemoryView._err", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = -1;
__Pyx_XDECREF(__pyx_v_error);
__Pyx_RefNannyFinishContext();
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
return __pyx_r;
}
/* "View.MemoryView":1268
*
* @cname('__pyx_memoryview_copy_contents')
* cdef int memoryview_copy_contents(__Pyx_memviewslice src, # <<<<<<<<<<<<<<
* __Pyx_memviewslice dst,
* int src_ndim, int dst_ndim,
*/
/*
 * memoryview_copy_contents (Cython-generated from View.MemoryView:1268).
 * Copy the elements of memoryview slice `src` into slice `dst`,
 * broadcasting leading dimensions when the ranks differ. Returns 0 on
 * success, -1 on error (a Python exception is set after re-acquiring
 * the GIL in the error path below).
 * NOTE(review): generated code — do not hand-edit; regenerate from the
 * .pyx source instead.
 */
static int __pyx_memoryview_copy_contents(__Pyx_memviewslice __pyx_v_src, __Pyx_memviewslice __pyx_v_dst, int __pyx_v_src_ndim, int __pyx_v_dst_ndim, int __pyx_v_dtype_is_object) {
void *__pyx_v_tmpdata;
size_t __pyx_v_itemsize;
int __pyx_v_i;
char __pyx_v_order;
int __pyx_v_broadcasting;
int __pyx_v_direct_copy;
__Pyx_memviewslice __pyx_v_tmp;
int __pyx_v_ndim;
int __pyx_r;
Py_ssize_t __pyx_t_1;
int __pyx_t_2;
int __pyx_t_3;
int __pyx_t_4;
int __pyx_t_5;
int __pyx_t_6;
void *__pyx_t_7;
int __pyx_t_8;
/* "View.MemoryView":1276
 * Check for overlapping memory and verify the shapes.
 * """
 * cdef void *tmpdata = NULL # <<<<<<<<<<<<<<
 * cdef size_t itemsize = src.memview.view.itemsize
 * cdef int i
 */
__pyx_v_tmpdata = NULL;
/* "View.MemoryView":1277
 * """
 * cdef void *tmpdata = NULL
 * cdef size_t itemsize = src.memview.view.itemsize # <<<<<<<<<<<<<<
 * cdef int i
 * cdef char order = get_best_order(&src, src_ndim)
 */
__pyx_t_1 = __pyx_v_src.memview->view.itemsize;
__pyx_v_itemsize = __pyx_t_1;
/* "View.MemoryView":1279
 * cdef size_t itemsize = src.memview.view.itemsize
 * cdef int i
 * cdef char order = get_best_order(&src, src_ndim) # <<<<<<<<<<<<<<
 * cdef bint broadcasting = False
 * cdef bint direct_copy = False
 */
__pyx_v_order = __pyx_get_best_slice_order((&__pyx_v_src), __pyx_v_src_ndim);
/* "View.MemoryView":1280
 * cdef int i
 * cdef char order = get_best_order(&src, src_ndim)
 * cdef bint broadcasting = False # <<<<<<<<<<<<<<
 * cdef bint direct_copy = False
 * cdef __Pyx_memviewslice tmp
 */
__pyx_v_broadcasting = 0;
/* "View.MemoryView":1281
 * cdef char order = get_best_order(&src, src_ndim)
 * cdef bint broadcasting = False
 * cdef bint direct_copy = False # <<<<<<<<<<<<<<
 * cdef __Pyx_memviewslice tmp
 *
 */
__pyx_v_direct_copy = 0;
/* Pad the lower-rank slice with leading length-1 dimensions so both
   sides have the same rank. */
/* "View.MemoryView":1284
 * cdef __Pyx_memviewslice tmp
 *
 * if src_ndim < dst_ndim: # <<<<<<<<<<<<<<
 * broadcast_leading(&src, src_ndim, dst_ndim)
 * elif dst_ndim < src_ndim:
 */
__pyx_t_2 = ((__pyx_v_src_ndim < __pyx_v_dst_ndim) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1285
 *
 * if src_ndim < dst_ndim:
 * broadcast_leading(&src, src_ndim, dst_ndim) # <<<<<<<<<<<<<<
 * elif dst_ndim < src_ndim:
 * broadcast_leading(&dst, dst_ndim, src_ndim)
 */
__pyx_memoryview_broadcast_leading((&__pyx_v_src), __pyx_v_src_ndim, __pyx_v_dst_ndim);
/* "View.MemoryView":1284
 * cdef __Pyx_memviewslice tmp
 *
 * if src_ndim < dst_ndim: # <<<<<<<<<<<<<<
 * broadcast_leading(&src, src_ndim, dst_ndim)
 * elif dst_ndim < src_ndim:
 */
goto __pyx_L3;
}
/* "View.MemoryView":1286
 * if src_ndim < dst_ndim:
 * broadcast_leading(&src, src_ndim, dst_ndim)
 * elif dst_ndim < src_ndim: # <<<<<<<<<<<<<<
 * broadcast_leading(&dst, dst_ndim, src_ndim)
 *
 */
__pyx_t_2 = ((__pyx_v_dst_ndim < __pyx_v_src_ndim) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1287
 * broadcast_leading(&src, src_ndim, dst_ndim)
 * elif dst_ndim < src_ndim:
 * broadcast_leading(&dst, dst_ndim, src_ndim) # <<<<<<<<<<<<<<
 *
 * cdef int ndim = max(src_ndim, dst_ndim)
 */
__pyx_memoryview_broadcast_leading((&__pyx_v_dst), __pyx_v_dst_ndim, __pyx_v_src_ndim);
/* "View.MemoryView":1286
 * if src_ndim < dst_ndim:
 * broadcast_leading(&src, src_ndim, dst_ndim)
 * elif dst_ndim < src_ndim: # <<<<<<<<<<<<<<
 * broadcast_leading(&dst, dst_ndim, src_ndim)
 *
 */
}
__pyx_L3:;
/* "View.MemoryView":1289
 * broadcast_leading(&dst, dst_ndim, src_ndim)
 *
 * cdef int ndim = max(src_ndim, dst_ndim) # <<<<<<<<<<<<<<
 *
 * for i in range(ndim):
 */
__pyx_t_3 = __pyx_v_dst_ndim;
__pyx_t_4 = __pyx_v_src_ndim;
if (((__pyx_t_3 > __pyx_t_4) != 0)) {
__pyx_t_5 = __pyx_t_3;
} else {
__pyx_t_5 = __pyx_t_4;
}
__pyx_v_ndim = __pyx_t_5;
/* Shape check: extents must match, or src's extent must be 1 (then it
   is broadcast by zeroing its stride). Indirect (suboffset) dims are
   rejected. */
/* "View.MemoryView":1291
 * cdef int ndim = max(src_ndim, dst_ndim)
 *
 * for i in range(ndim): # <<<<<<<<<<<<<<
 * if src.shape[i] != dst.shape[i]:
 * if src.shape[i] == 1:
 */
__pyx_t_5 = __pyx_v_ndim;
__pyx_t_3 = __pyx_t_5;
for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {
__pyx_v_i = __pyx_t_4;
/* "View.MemoryView":1292
 *
 * for i in range(ndim):
 * if src.shape[i] != dst.shape[i]: # <<<<<<<<<<<<<<
 * if src.shape[i] == 1:
 * broadcasting = True
 */
__pyx_t_2 = (((__pyx_v_src.shape[__pyx_v_i]) != (__pyx_v_dst.shape[__pyx_v_i])) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1293
 * for i in range(ndim):
 * if src.shape[i] != dst.shape[i]:
 * if src.shape[i] == 1: # <<<<<<<<<<<<<<
 * broadcasting = True
 * src.strides[i] = 0
 */
__pyx_t_2 = (((__pyx_v_src.shape[__pyx_v_i]) == 1) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1294
 * if src.shape[i] != dst.shape[i]:
 * if src.shape[i] == 1:
 * broadcasting = True # <<<<<<<<<<<<<<
 * src.strides[i] = 0
 * else:
 */
__pyx_v_broadcasting = 1;
/* "View.MemoryView":1295
 * if src.shape[i] == 1:
 * broadcasting = True
 * src.strides[i] = 0 # <<<<<<<<<<<<<<
 * else:
 * _err_extents(i, dst.shape[i], src.shape[i])
 */
(__pyx_v_src.strides[__pyx_v_i]) = 0;
/* "View.MemoryView":1293
 * for i in range(ndim):
 * if src.shape[i] != dst.shape[i]:
 * if src.shape[i] == 1: # <<<<<<<<<<<<<<
 * broadcasting = True
 * src.strides[i] = 0
 */
goto __pyx_L7;
}
/* "View.MemoryView":1297
 * src.strides[i] = 0
 * else:
 * _err_extents(i, dst.shape[i], src.shape[i]) # <<<<<<<<<<<<<<
 *
 * if src.suboffsets[i] >= 0:
 */
/*else*/ {
__pyx_t_6 = __pyx_memoryview_err_extents(__pyx_v_i, (__pyx_v_dst.shape[__pyx_v_i]), (__pyx_v_src.shape[__pyx_v_i])); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(2, 1297, __pyx_L1_error)
}
__pyx_L7:;
/* "View.MemoryView":1292
 *
 * for i in range(ndim):
 * if src.shape[i] != dst.shape[i]: # <<<<<<<<<<<<<<
 * if src.shape[i] == 1:
 * broadcasting = True
 */
}
/* "View.MemoryView":1299
 * _err_extents(i, dst.shape[i], src.shape[i])
 *
 * if src.suboffsets[i] >= 0: # <<<<<<<<<<<<<<
 * _err_dim(ValueError, "Dimension %d is not direct", i)
 *
 */
__pyx_t_2 = (((__pyx_v_src.suboffsets[__pyx_v_i]) >= 0) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1300
 *
 * if src.suboffsets[i] >= 0:
 * _err_dim(ValueError, "Dimension %d is not direct", i) # <<<<<<<<<<<<<<
 *
 * if slices_overlap(&src, &dst, ndim, itemsize):
 */
__pyx_t_6 = __pyx_memoryview_err_dim(__pyx_builtin_ValueError, ((char *)"Dimension %d is not direct"), __pyx_v_i); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(2, 1300, __pyx_L1_error)
/* "View.MemoryView":1299
 * _err_extents(i, dst.shape[i], src.shape[i])
 *
 * if src.suboffsets[i] >= 0: # <<<<<<<<<<<<<<
 * _err_dim(ValueError, "Dimension %d is not direct", i)
 *
 */
}
}
/* NOTE: if src and dst memory overlap, src is first copied into a
   freshly allocated contiguous buffer (tmpdata/tmp) so the copy below
   reads stable data. */
/* "View.MemoryView":1302
 * _err_dim(ValueError, "Dimension %d is not direct", i)
 *
 * if slices_overlap(&src, &dst, ndim, itemsize): # <<<<<<<<<<<<<<
 *
 * if not slice_is_contig(src, order, ndim):
 */
__pyx_t_2 = (__pyx_slices_overlap((&__pyx_v_src), (&__pyx_v_dst), __pyx_v_ndim, __pyx_v_itemsize) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1304
 * if slices_overlap(&src, &dst, ndim, itemsize):
 *
 * if not slice_is_contig(src, order, ndim): # <<<<<<<<<<<<<<
 * order = get_best_order(&dst, ndim)
 *
 */
__pyx_t_2 = ((!(__pyx_memviewslice_is_contig(__pyx_v_src, __pyx_v_order, __pyx_v_ndim) != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1305
 *
 * if not slice_is_contig(src, order, ndim):
 * order = get_best_order(&dst, ndim) # <<<<<<<<<<<<<<
 *
 * tmpdata = copy_data_to_temp(&src, &tmp, order, ndim)
 */
__pyx_v_order = __pyx_get_best_slice_order((&__pyx_v_dst), __pyx_v_ndim);
/* "View.MemoryView":1304
 * if slices_overlap(&src, &dst, ndim, itemsize):
 *
 * if not slice_is_contig(src, order, ndim): # <<<<<<<<<<<<<<
 * order = get_best_order(&dst, ndim)
 *
 */
}
/* "View.MemoryView":1307
 * order = get_best_order(&dst, ndim)
 *
 * tmpdata = copy_data_to_temp(&src, &tmp, order, ndim) # <<<<<<<<<<<<<<
 * src = tmp
 *
 */
__pyx_t_7 = __pyx_memoryview_copy_data_to_temp((&__pyx_v_src), (&__pyx_v_tmp), __pyx_v_order, __pyx_v_ndim); if (unlikely(__pyx_t_7 == ((void *)NULL))) __PYX_ERR(2, 1307, __pyx_L1_error)
__pyx_v_tmpdata = __pyx_t_7;
/* "View.MemoryView":1308
 *
 * tmpdata = copy_data_to_temp(&src, &tmp, order, ndim)
 * src = tmp # <<<<<<<<<<<<<<
 *
 * if not broadcasting:
 */
__pyx_v_src = __pyx_v_tmp;
/* "View.MemoryView":1302
 * _err_dim(ValueError, "Dimension %d is not direct", i)
 *
 * if slices_overlap(&src, &dst, ndim, itemsize): # <<<<<<<<<<<<<<
 *
 * if not slice_is_contig(src, order, ndim):
 */
}
/* Fast path: when both slices are contiguous in the same order, a
   single memcpy of the whole buffer suffices. */
/* "View.MemoryView":1310
 * src = tmp
 *
 * if not broadcasting: # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_t_2 = ((!(__pyx_v_broadcasting != 0)) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1313
 *
 *
 * if slice_is_contig(src, 'C', ndim): # <<<<<<<<<<<<<<
 * direct_copy = slice_is_contig(dst, 'C', ndim)
 * elif slice_is_contig(src, 'F', ndim):
 */
__pyx_t_2 = (__pyx_memviewslice_is_contig(__pyx_v_src, 'C', __pyx_v_ndim) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1314
 *
 * if slice_is_contig(src, 'C', ndim):
 * direct_copy = slice_is_contig(dst, 'C', ndim) # <<<<<<<<<<<<<<
 * elif slice_is_contig(src, 'F', ndim):
 * direct_copy = slice_is_contig(dst, 'F', ndim)
 */
__pyx_v_direct_copy = __pyx_memviewslice_is_contig(__pyx_v_dst, 'C', __pyx_v_ndim);
/* "View.MemoryView":1313
 *
 *
 * if slice_is_contig(src, 'C', ndim): # <<<<<<<<<<<<<<
 * direct_copy = slice_is_contig(dst, 'C', ndim)
 * elif slice_is_contig(src, 'F', ndim):
 */
goto __pyx_L12;
}
/* "View.MemoryView":1315
 * if slice_is_contig(src, 'C', ndim):
 * direct_copy = slice_is_contig(dst, 'C', ndim)
 * elif slice_is_contig(src, 'F', ndim): # <<<<<<<<<<<<<<
 * direct_copy = slice_is_contig(dst, 'F', ndim)
 *
 */
__pyx_t_2 = (__pyx_memviewslice_is_contig(__pyx_v_src, 'F', __pyx_v_ndim) != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1316
 * direct_copy = slice_is_contig(dst, 'C', ndim)
 * elif slice_is_contig(src, 'F', ndim):
 * direct_copy = slice_is_contig(dst, 'F', ndim) # <<<<<<<<<<<<<<
 *
 * if direct_copy:
 */
__pyx_v_direct_copy = __pyx_memviewslice_is_contig(__pyx_v_dst, 'F', __pyx_v_ndim);
/* "View.MemoryView":1315
 * if slice_is_contig(src, 'C', ndim):
 * direct_copy = slice_is_contig(dst, 'C', ndim)
 * elif slice_is_contig(src, 'F', ndim): # <<<<<<<<<<<<<<
 * direct_copy = slice_is_contig(dst, 'F', ndim)
 *
 */
}
__pyx_L12:;
/* "View.MemoryView":1318
 * direct_copy = slice_is_contig(dst, 'F', ndim)
 *
 * if direct_copy: # <<<<<<<<<<<<<<
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 */
__pyx_t_2 = (__pyx_v_direct_copy != 0);
if (__pyx_t_2) {
/* "View.MemoryView":1320
 * if direct_copy:
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False) # <<<<<<<<<<<<<<
 * memcpy(dst.data, src.data, slice_get_size(&src, ndim))
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 */
__pyx_memoryview_refcount_copying((&__pyx_v_dst), __pyx_v_dtype_is_object, __pyx_v_ndim, 0);
/* "View.MemoryView":1321
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 * memcpy(dst.data, src.data, slice_get_size(&src, ndim)) # <<<<<<<<<<<<<<
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 * free(tmpdata)
 */
(void)(memcpy(__pyx_v_dst.data, __pyx_v_src.data, __pyx_memoryview_slice_get_size((&__pyx_v_src), __pyx_v_ndim)));
/* "View.MemoryView":1322
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 * memcpy(dst.data, src.data, slice_get_size(&src, ndim))
 * refcount_copying(&dst, dtype_is_object, ndim, True) # <<<<<<<<<<<<<<
 * free(tmpdata)
 * return 0
 */
__pyx_memoryview_refcount_copying((&__pyx_v_dst), __pyx_v_dtype_is_object, __pyx_v_ndim, 1);
/* "View.MemoryView":1323
 * memcpy(dst.data, src.data, slice_get_size(&src, ndim))
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 * free(tmpdata) # <<<<<<<<<<<<<<
 * return 0
 *
 */
free(__pyx_v_tmpdata);
/* "View.MemoryView":1324
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 * free(tmpdata)
 * return 0 # <<<<<<<<<<<<<<
 *
 * if order == 'F' == get_best_order(&dst, ndim):
 */
__pyx_r = 0;
goto __pyx_L0;
/* "View.MemoryView":1318
 * direct_copy = slice_is_contig(dst, 'F', ndim)
 *
 * if direct_copy: # <<<<<<<<<<<<<<
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 */
}
/* "View.MemoryView":1310
 * src = tmp
 *
 * if not broadcasting: # <<<<<<<<<<<<<<
 *
 *
 */
}
/* Both sides favor Fortran order: transpose both slices, presumably so
   the generic strided copy below iterates in its preferred (C) order. */
/* "View.MemoryView":1326
 * return 0
 *
 * if order == 'F' == get_best_order(&dst, ndim): # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_t_2 = (__pyx_v_order == 'F');
if (__pyx_t_2) {
__pyx_t_2 = ('F' == __pyx_get_best_slice_order((&__pyx_v_dst), __pyx_v_ndim));
}
__pyx_t_8 = (__pyx_t_2 != 0);
if (__pyx_t_8) {
/* "View.MemoryView":1329
 *
 *
 * transpose_memslice(&src) # <<<<<<<<<<<<<<
 * transpose_memslice(&dst)
 *
 */
__pyx_t_5 = __pyx_memslice_transpose((&__pyx_v_src)); if (unlikely(__pyx_t_5 == ((int)0))) __PYX_ERR(2, 1329, __pyx_L1_error)
/* "View.MemoryView":1330
 *
 * transpose_memslice(&src)
 * transpose_memslice(&dst) # <<<<<<<<<<<<<<
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 */
__pyx_t_5 = __pyx_memslice_transpose((&__pyx_v_dst)); if (unlikely(__pyx_t_5 == ((int)0))) __PYX_ERR(2, 1330, __pyx_L1_error)
/* "View.MemoryView":1326
 * return 0
 *
 * if order == 'F' == get_best_order(&dst, ndim): # <<<<<<<<<<<<<<
 *
 *
 */
}
/* Generic path: element-wise strided copy, bracketed by DECREF/INCREF
   passes when the dtype holds Python objects. */
/* "View.MemoryView":1332
 * transpose_memslice(&dst)
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False) # <<<<<<<<<<<<<<
 * copy_strided_to_strided(&src, &dst, ndim, itemsize)
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 */
__pyx_memoryview_refcount_copying((&__pyx_v_dst), __pyx_v_dtype_is_object, __pyx_v_ndim, 0);
/* "View.MemoryView":1333
 *
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 * copy_strided_to_strided(&src, &dst, ndim, itemsize) # <<<<<<<<<<<<<<
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 *
 */
copy_strided_to_strided((&__pyx_v_src), (&__pyx_v_dst), __pyx_v_ndim, __pyx_v_itemsize);
/* "View.MemoryView":1334
 * refcount_copying(&dst, dtype_is_object, ndim, False)
 * copy_strided_to_strided(&src, &dst, ndim, itemsize)
 * refcount_copying(&dst, dtype_is_object, ndim, True) # <<<<<<<<<<<<<<
 *
 * free(tmpdata)
 */
__pyx_memoryview_refcount_copying((&__pyx_v_dst), __pyx_v_dtype_is_object, __pyx_v_ndim, 1);
/* "View.MemoryView":1336
 * refcount_copying(&dst, dtype_is_object, ndim, True)
 *
 * free(tmpdata) # <<<<<<<<<<<<<<
 * return 0
 *
 */
free(__pyx_v_tmpdata);
/* "View.MemoryView":1337
 *
 * free(tmpdata)
 * return 0 # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_broadcast_leading')
 */
__pyx_r = 0;
goto __pyx_L0;
/* "View.MemoryView":1268
 *
 * @cname('__pyx_memoryview_copy_contents')
 * cdef int memoryview_copy_contents(__Pyx_memviewslice src, # <<<<<<<<<<<<<<
 * __Pyx_memviewslice dst,
 * int src_ndim, int dst_ndim,
 */
/* function exit code */
/* Error exit: re-acquire the GIL only to record the traceback.
   NOTE(review): tmpdata allocated by copy_data_to_temp is NOT freed on
   this path, so a failure in transpose_memslice after the overlap copy
   would leak it — confirm against upstream Cython before changing. */
__pyx_L1_error:;
{
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_AddTraceback("View.MemoryView.memoryview_copy_contents", __pyx_clineno, __pyx_lineno, __pyx_filename);
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
}
__pyx_r = -1;
__pyx_L0:;
return __pyx_r;
}
/* "View.MemoryView":1340
*
* @cname('__pyx_memoryview_broadcast_leading')
* cdef void broadcast_leading(__Pyx_memviewslice *mslice, # <<<<<<<<<<<<<<
* int ndim,
* int ndim_other) nogil:
*/
/*
 * broadcast_leading (generated from View.MemoryView:1340).
 * Shift the existing `ndim` dimensions of *mslice towards the tail of
 * an `ndim_other`-dimensional slice and fill the new leading dimensions
 * with extent 1 (suboffset -1, i.e. direct access), so the slice can
 * broadcast against a higher-rank counterpart.
 */
static void __pyx_memoryview_broadcast_leading(__Pyx_memviewslice *__pyx_v_mslice, int __pyx_v_ndim, int __pyx_v_ndim_other) {
  int dim;
  const int shift = __pyx_v_ndim_other - __pyx_v_ndim;

  /* Move dimension info to the end; iterate backwards so the
     overlapping source/destination ranges do not clobber each other. */
  for (dim = __pyx_v_ndim - 1; dim >= 0; dim--) {
    __pyx_v_mslice->shape[dim + shift] = __pyx_v_mslice->shape[dim];
    __pyx_v_mslice->strides[dim + shift] = __pyx_v_mslice->strides[dim];
    __pyx_v_mslice->suboffsets[dim + shift] = __pyx_v_mslice->suboffsets[dim];
  }

  /* Fresh leading dimensions: extent 1, stride copied from dim 0,
     no indirection. */
  for (dim = 0; dim < shift; dim++) {
    __pyx_v_mslice->shape[dim] = 1;
    __pyx_v_mslice->strides[dim] = __pyx_v_mslice->strides[0];
    __pyx_v_mslice->suboffsets[dim] = -1;
  }
}
/* "View.MemoryView":1362
*
* @cname('__pyx_memoryview_refcount_copying')
* cdef void refcount_copying(__Pyx_memviewslice *dst, bint dtype_is_object, # <<<<<<<<<<<<<<
* int ndim, bint inc) nogil:
*
*/
/*
 * refcount_copying (generated from View.MemoryView:1362).
 * When the element type is a Python object, walk every element of *dst
 * and adjust its reference count: inc=0 releases references before raw
 * memory is overwritten, inc=1 acquires references afterwards.
 * A no-op for non-object dtypes.
 */
static void __pyx_memoryview_refcount_copying(__Pyx_memviewslice *__pyx_v_dst, int __pyx_v_dtype_is_object, int __pyx_v_ndim, int __pyx_v_inc) {
  if (!__pyx_v_dtype_is_object)
    return;  /* plain data: nothing to refcount */
  __pyx_memoryview_refcount_objects_in_slice_with_gil(
      __pyx_v_dst->data, __pyx_v_dst->shape, __pyx_v_dst->strides,
      __pyx_v_ndim, __pyx_v_inc);
}
/* "View.MemoryView":1371
*
* @cname('__pyx_memoryview_refcount_objects_in_slice_with_gil')
* cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape, # <<<<<<<<<<<<<<
* Py_ssize_t *strides, int ndim,
* bint inc) with gil:
*/
/*
 * refcount_objects_in_slice_with_gil (generated from
 * View.MemoryView:1371). GIL-acquiring wrapper: ensures the GIL via
 * PyGILState_Ensure, delegates to refcount_objects_in_slice, then
 * releases the GIL. Safe to call from nogil code.
 */
static void __pyx_memoryview_refcount_objects_in_slice_with_gil(char *__pyx_v_data, Py_ssize_t *__pyx_v_shape, Py_ssize_t *__pyx_v_strides, int __pyx_v_ndim, int __pyx_v_inc) {
__Pyx_RefNannyDeclarations
#ifdef WITH_THREAD
PyGILState_STATE __pyx_gilstate_save = __Pyx_PyGILState_Ensure();
#endif
__Pyx_RefNannySetupContext("refcount_objects_in_slice_with_gil", 0);
/* "View.MemoryView":1374
 * Py_ssize_t *strides, int ndim,
 * bint inc) with gil:
 * refcount_objects_in_slice(data, shape, strides, ndim, inc) # <<<<<<<<<<<<<<
 *
 * @cname('__pyx_memoryview_refcount_objects_in_slice')
 */
__pyx_memoryview_refcount_objects_in_slice(__pyx_v_data, __pyx_v_shape, __pyx_v_strides, __pyx_v_ndim, __pyx_v_inc);
/* "View.MemoryView":1371
 *
 * @cname('__pyx_memoryview_refcount_objects_in_slice_with_gil')
 * cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape, # <<<<<<<<<<<<<<
 * Py_ssize_t *strides, int ndim,
 * bint inc) with gil:
 */
/* function exit code */
__Pyx_RefNannyFinishContext();
#ifdef WITH_THREAD
__Pyx_PyGILState_Release(__pyx_gilstate_save);
#endif
}
/* "View.MemoryView":1377
*
* @cname('__pyx_memoryview_refcount_objects_in_slice')
* cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape, # <<<<<<<<<<<<<<
* Py_ssize_t *strides, int ndim, bint inc):
* cdef Py_ssize_t i
*/
/*
 * refcount_objects_in_slice (generated from View.MemoryView:1377).
 * Recursively walk an ndim-dimensional strided slice whose elements are
 * PyObject* and Py_INCREF (inc true) or Py_DECREF (inc false) each one.
 * The innermost dimension touches the objects directly; outer
 * dimensions recurse with shape/strides advanced by one. The caller is
 * expected to hold the GIL (see the *_with_gil wrapper in this file).
 */
static void __pyx_memoryview_refcount_objects_in_slice(char *__pyx_v_data, Py_ssize_t *__pyx_v_shape, Py_ssize_t *__pyx_v_strides, int __pyx_v_ndim, int __pyx_v_inc) {
CYTHON_UNUSED Py_ssize_t __pyx_v_i;
__Pyx_RefNannyDeclarations
Py_ssize_t __pyx_t_1;
Py_ssize_t __pyx_t_2;
Py_ssize_t __pyx_t_3;
int __pyx_t_4;
__Pyx_RefNannySetupContext("refcount_objects_in_slice", 0);
/* "View.MemoryView":1381
 * cdef Py_ssize_t i
 *
 * for i in range(shape[0]): # <<<<<<<<<<<<<<
 * if ndim == 1:
 * if inc:
 */
__pyx_t_1 = (__pyx_v_shape[0]);
__pyx_t_2 = __pyx_t_1;
for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) {
__pyx_v_i = __pyx_t_3;
/* "View.MemoryView":1382
 *
 * for i in range(shape[0]):
 * if ndim == 1: # <<<<<<<<<<<<<<
 * if inc:
 * Py_INCREF((<PyObject **> data)[0])
 */
__pyx_t_4 = ((__pyx_v_ndim == 1) != 0);
if (__pyx_t_4) {
/* "View.MemoryView":1383
 * for i in range(shape[0]):
 * if ndim == 1:
 * if inc: # <<<<<<<<<<<<<<
 * Py_INCREF((<PyObject **> data)[0])
 * else:
 */
__pyx_t_4 = (__pyx_v_inc != 0);
if (__pyx_t_4) {
/* "View.MemoryView":1384
 * if ndim == 1:
 * if inc:
 * Py_INCREF((<PyObject **> data)[0]) # <<<<<<<<<<<<<<
 * else:
 * Py_DECREF((<PyObject **> data)[0])
 */
Py_INCREF((((PyObject **)__pyx_v_data)[0]));
/* "View.MemoryView":1383
 * for i in range(shape[0]):
 * if ndim == 1:
 * if inc: # <<<<<<<<<<<<<<
 * Py_INCREF((<PyObject **> data)[0])
 * else:
 */
goto __pyx_L6;
}
/* "View.MemoryView":1386
 * Py_INCREF((<PyObject **> data)[0])
 * else:
 * Py_DECREF((<PyObject **> data)[0]) # <<<<<<<<<<<<<<
 * else:
 * refcount_objects_in_slice(data, shape + 1, strides + 1,
 */
/*else*/ {
Py_DECREF((((PyObject **)__pyx_v_data)[0]));
}
__pyx_L6:;
/* "View.MemoryView":1382
 *
 * for i in range(shape[0]):
 * if ndim == 1: # <<<<<<<<<<<<<<
 * if inc:
 * Py_INCREF((<PyObject **> data)[0])
 */
goto __pyx_L5;
}
/* "View.MemoryView":1388
 * Py_DECREF((<PyObject **> data)[0])
 * else:
 * refcount_objects_in_slice(data, shape + 1, strides + 1, # <<<<<<<<<<<<<<
 * ndim - 1, inc)
 *
 */
/*else*/ {
/* "View.MemoryView":1389
 * else:
 * refcount_objects_in_slice(data, shape + 1, strides + 1,
 * ndim - 1, inc) # <<<<<<<<<<<<<<
 *
 * data += strides[0]
 */
__pyx_memoryview_refcount_objects_in_slice(__pyx_v_data, (__pyx_v_shape + 1), (__pyx_v_strides + 1), (__pyx_v_ndim - 1), __pyx_v_inc);
}
__pyx_L5:;
/* Advance to the next element/sub-slice in this dimension. */
/* "View.MemoryView":1391
 * ndim - 1, inc)
 *
 * data += strides[0] # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_v_data = (__pyx_v_data + (__pyx_v_strides[0]));
}
/* "View.MemoryView":1377
 *
 * @cname('__pyx_memoryview_refcount_objects_in_slice')
 * cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape, # <<<<<<<<<<<<<<
 * Py_ssize_t *strides, int ndim, bint inc):
 * cdef Py_ssize_t i
 */
/* function exit code */
__Pyx_RefNannyFinishContext();
}
/* "View.MemoryView":1397
*
* @cname('__pyx_memoryview_slice_assign_scalar')
* cdef void slice_assign_scalar(__Pyx_memviewslice *dst, int ndim, # <<<<<<<<<<<<<<
* size_t itemsize, void *item,
* bint dtype_is_object) nogil:
*/
/*
 * slice_assign_scalar (generated from View.MemoryView:1397).
 * Overwrite every element of *dst with the `itemsize`-byte value at
 * `item`. For object dtypes the old references are released first and
 * the newly written pointers are INCREF'd afterwards; for plain data
 * both refcount passes are no-ops.
 */
static void __pyx_memoryview_slice_assign_scalar(__Pyx_memviewslice *__pyx_v_dst, int __pyx_v_ndim, size_t __pyx_v_itemsize, void *__pyx_v_item, int __pyx_v_dtype_is_object) {
  /* Release the references that are about to be overwritten. */
  __pyx_memoryview_refcount_copying(__pyx_v_dst, __pyx_v_dtype_is_object,
                                    __pyx_v_ndim, 0);
  /* Raw strided fill. */
  __pyx_memoryview__slice_assign_scalar(__pyx_v_dst->data, __pyx_v_dst->shape,
                                        __pyx_v_dst->strides, __pyx_v_ndim,
                                        __pyx_v_itemsize, __pyx_v_item);
  /* Acquire references to the newly written object pointers. */
  __pyx_memoryview_refcount_copying(__pyx_v_dst, __pyx_v_dtype_is_object,
                                    __pyx_v_ndim, 1);
}
/* "View.MemoryView":1407
*
* @cname('__pyx_memoryview__slice_assign_scalar')
* cdef void _slice_assign_scalar(char *data, Py_ssize_t *shape, # <<<<<<<<<<<<<<
* Py_ssize_t *strides, int ndim,
* size_t itemsize, void *item) nogil:
*/
/*
 * _slice_assign_scalar (generated from View.MemoryView:1407).
 * Recursive worker for slice_assign_scalar: in the innermost dimension,
 * memcpy the item into each element; otherwise recurse into every
 * sub-slice. `data` advances by this dimension's stride either way.
 */
static void __pyx_memoryview__slice_assign_scalar(char *__pyx_v_data, Py_ssize_t *__pyx_v_shape, Py_ssize_t *__pyx_v_strides, int __pyx_v_ndim, size_t __pyx_v_itemsize, void *__pyx_v_item) {
  Py_ssize_t idx;
  char *cursor = __pyx_v_data;
  const Py_ssize_t stride = __pyx_v_strides[0];
  const Py_ssize_t extent = __pyx_v_shape[0];

  if (__pyx_v_ndim == 1) {
    /* Base case: write the scalar into each element of this row. */
    for (idx = 0; idx < extent; idx++) {
      memcpy(cursor, __pyx_v_item, __pyx_v_itemsize);
      cursor += stride;
    }
  } else {
    /* Recurse one dimension down for each sub-slice. */
    for (idx = 0; idx < extent; idx++) {
      __pyx_memoryview__slice_assign_scalar(cursor, __pyx_v_shape + 1,
                                            __pyx_v_strides + 1,
                                            __pyx_v_ndim - 1,
                                            __pyx_v_itemsize, __pyx_v_item);
      cursor += stride;
    }
  }
}
/* "(tree fragment)":1
* def __pyx_unpickle_Enum(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
* cdef object __pyx_PickleError
* cdef object __pyx_result
*/
/* Python wrapper */
/*
 * Python-level wrapper for the generated __pyx_unpickle_Enum helper
 * ("(tree fragment)":1). Unpacks exactly three arguments
 * (__pyx_type, __pyx_checksum, __pyx_state) from positional args and/or
 * keywords — the fallthrough switch fills `values` from the back for
 * positionals, then from keywords — converts the checksum to a C long,
 * and delegates to the implementation function. Raises TypeError via
 * __Pyx_RaiseArgtupleInvalid on arity mismatch.
 */
static PyObject *__pyx_pw_15View_dot_MemoryView_1__pyx_unpickle_Enum(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static PyMethodDef __pyx_mdef_15View_dot_MemoryView_1__pyx_unpickle_Enum = {"__pyx_unpickle_Enum", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_15View_dot_MemoryView_1__pyx_unpickle_Enum, METH_VARARGS|METH_KEYWORDS, 0};
static PyObject *__pyx_pw_15View_dot_MemoryView_1__pyx_unpickle_Enum(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v___pyx_type = 0;
long __pyx_v___pyx_checksum;
PyObject *__pyx_v___pyx_state = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__pyx_unpickle_Enum (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0};
PyObject* values[3] = {0,0,0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
CYTHON_FALLTHROUGH;
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
CYTHON_FALLTHROUGH;
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Enum", 1, 3, 3, 1); __PYX_ERR(2, 1, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 2:
if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Enum", 1, 3, 3, 2); __PYX_ERR(2, 1, __pyx_L3_error)
}
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_Enum") < 0)) __PYX_ERR(2, 1, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
goto __pyx_L5_argtuple_error;
} else {
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
}
__pyx_v___pyx_type = values[0];
__pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(2, 1, __pyx_L3_error)
__pyx_v___pyx_state = values[2];
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__pyx_unpickle_Enum", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(2, 1, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("View.MemoryView.__pyx_unpickle_Enum", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return NULL;
__pyx_L4_argument_unpacking_done:;
__pyx_r = __pyx_pf_15View_dot_MemoryView___pyx_unpickle_Enum(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state);
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* Implementation body of the module-level `__pyx_unpickle_Enum` helper used
 * when unpickling View.MemoryView Enum sentinels.  Steps: (1) verify the
 * pickled layout checksum (0xb068931) and raise pickle.PickleError on
 * mismatch, (2) allocate a fresh instance via Enum.__new__(__pyx_type),
 * (3) if a state tuple was pickled, apply it via
 * __pyx_unpickle_Enum__set_state, (4) return the rebuilt instance.
 * Reference handling follows the code generator's temp-variable protocol
 * (__pyx_t_N owned refs released on every exit path). */
static PyObject *__pyx_pf_15View_dot_MemoryView___pyx_unpickle_Enum(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) {
  PyObject *__pyx_v___pyx_PickleError = 0;
  PyObject *__pyx_v___pyx_result = 0;
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  int __pyx_t_1;
  PyObject *__pyx_t_2 = NULL;
  PyObject *__pyx_t_3 = NULL;
  PyObject *__pyx_t_4 = NULL;
  PyObject *__pyx_t_5 = NULL;
  int __pyx_t_6;
  __Pyx_RefNannySetupContext("__pyx_unpickle_Enum", 0);
  /* "(tree fragment)":4
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 * if __pyx_checksum != 0xb068931:             # <<<<<<<<<<<<<<
 *         from pickle import PickleError as __pyx_PickleError
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 */
  /* Checksum mismatch means the pickled data was produced by a build with a
   * different Enum struct layout; refuse to unpickle it. */
  __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xb068931) != 0);
  if (__pyx_t_1) {
    /* "(tree fragment)":5
 * cdef object __pyx_result
 * if __pyx_checksum != 0xb068931:
 *         from pickle import PickleError as __pyx_PickleError             # <<<<<<<<<<<<<<
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 *     __pyx_result = Enum.__new__(__pyx_type)
 */
    /* Lazy `from pickle import PickleError` — only paid on the error path. */
    __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 5, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_INCREF(__pyx_n_s_PickleError);
    __Pyx_GIVEREF(__pyx_n_s_PickleError);
    PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
    __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 5, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 5, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __Pyx_INCREF(__pyx_t_2);
    __pyx_v___pyx_PickleError = __pyx_t_2;
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
    /* "(tree fragment)":6
 * if __pyx_checksum != 0xb068931:
 *         from pickle import PickleError as __pyx_PickleError
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)             # <<<<<<<<<<<<<<
 *     __pyx_result = Enum.__new__(__pyx_type)
 *     if __pyx_state is not None:
 */
    /* Build the "%s" formatted message, then call PickleError(msg) and raise. */
    __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 6, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_2);
    __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xb0, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 6, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_4);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __Pyx_INCREF(__pyx_v___pyx_PickleError);
    __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL;
    /* Unpack a bound method into (self, function) to use the 2-arg fast call. */
    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {
      __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2);
      if (likely(__pyx_t_5)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
        __Pyx_INCREF(__pyx_t_5);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_2, function);
      }
    }
    __pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4);
    __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
    if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 6, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
    __Pyx_Raise(__pyx_t_3, 0, 0, 0);
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
    __PYX_ERR(2, 6, __pyx_L1_error)
    /* "(tree fragment)":4
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 * if __pyx_checksum != 0xb068931:             # <<<<<<<<<<<<<<
 *         from pickle import PickleError as __pyx_PickleError
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 */
  }
  /* "(tree fragment)":7
 *         from pickle import PickleError as __pyx_PickleError
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 *     __pyx_result = Enum.__new__(__pyx_type)             # <<<<<<<<<<<<<<
 *     if __pyx_state is not None:
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 */
  /* Allocate without running __init__: Enum.__new__(__pyx_type). */
  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_MemviewEnum_type), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(2, 7, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_2);
  __pyx_t_4 = NULL;
  if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) {
    __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2);
    if (likely(__pyx_t_4)) {
      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
      __Pyx_INCREF(__pyx_t_4);
      __Pyx_INCREF(function);
      __Pyx_DECREF_SET(__pyx_t_2, function);
    }
  }
  __pyx_t_3 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type);
  __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
  if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 7, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_3);
  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
  __pyx_v___pyx_result = __pyx_t_3;
  __pyx_t_3 = 0;
  /* "(tree fragment)":8
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 *     __pyx_result = Enum.__new__(__pyx_type)
 *     if __pyx_state is not None:             # <<<<<<<<<<<<<<
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 *     return __pyx_result
 */
  __pyx_t_1 = (__pyx_v___pyx_state != Py_None);
  __pyx_t_6 = (__pyx_t_1 != 0);
  if (__pyx_t_6) {
    /* "(tree fragment)":9
 *     __pyx_result = Enum.__new__(__pyx_type)
 *     if __pyx_state is not None:
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)             # <<<<<<<<<<<<<<
 *     return __pyx_result
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):
 */
    /* Runtime type check for the `tuple __pyx_state` parameter of the cdef helper. */
    if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(2, 9, __pyx_L1_error)
    __pyx_t_3 = __pyx_unpickle_Enum__set_state(((struct __pyx_MemviewEnum_obj *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 9, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_3);
    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
    /* "(tree fragment)":8
 *         raise __pyx_PickleError("Incompatible checksums (%s vs 0xb068931 = (name))" % __pyx_checksum)
 *     __pyx_result = Enum.__new__(__pyx_type)
 *     if __pyx_state is not None:             # <<<<<<<<<<<<<<
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 *     return __pyx_result
 */
  }
  /* "(tree fragment)":10
 *     if __pyx_state is not None:
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 *     return __pyx_result             # <<<<<<<<<<<<<<
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):
 *     __pyx_result.name = __pyx_state[0]
 */
  __Pyx_XDECREF(__pyx_r);
  __Pyx_INCREF(__pyx_v___pyx_result);
  __pyx_r = __pyx_v___pyx_result;
  goto __pyx_L0;
  /* "(tree fragment)":1
 * def __pyx_unpickle_Enum(__pyx_type, long __pyx_checksum, __pyx_state):             # <<<<<<<<<<<<<<
 *     cdef object __pyx_PickleError
 *     cdef object __pyx_result
 */
  /* function exit code */
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_2);
  __Pyx_XDECREF(__pyx_t_3);
  __Pyx_XDECREF(__pyx_t_4);
  __Pyx_XDECREF(__pyx_t_5);
  __Pyx_AddTraceback("View.MemoryView.__pyx_unpickle_Enum", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = NULL;
  __pyx_L0:;
  __Pyx_XDECREF(__pyx_v___pyx_PickleError);
  __Pyx_XDECREF(__pyx_v___pyx_result);
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* "(tree fragment)":11
* __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
* return __pyx_result
* cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<<
* __pyx_result.name = __pyx_state[0]
* if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
*/
/* Applies a pickled state tuple to a freshly allocated Enum instance:
 *   __pyx_result.name = __pyx_state[0]
 *   if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
 *       __pyx_result.__dict__.update(__pyx_state[1])
 * Returns None on success, NULL with an exception set on failure.  The
 * explicit Py_None guards mirror CPython's error messages for subscripting
 * and len() of None even though the caller already type-checked the tuple. */
static PyObject *__pyx_unpickle_Enum__set_state(struct __pyx_MemviewEnum_obj *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) {
  PyObject *__pyx_r = NULL;
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  int __pyx_t_2;
  Py_ssize_t __pyx_t_3;
  int __pyx_t_4;
  int __pyx_t_5;
  PyObject *__pyx_t_6 = NULL;
  PyObject *__pyx_t_7 = NULL;
  PyObject *__pyx_t_8 = NULL;
  __Pyx_RefNannySetupContext("__pyx_unpickle_Enum__set_state", 0);
  /* "(tree fragment)":12
 *     return __pyx_result
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):
 *     __pyx_result.name = __pyx_state[0]             # <<<<<<<<<<<<<<
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
 *         __pyx_result.__dict__.update(__pyx_state[1])
 */
  if (unlikely(__pyx_v___pyx_state == Py_None)) {
    PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
    __PYX_ERR(2, 12, __pyx_L1_error)
  }
  __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 12, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  /* Swap the new name into the struct slot, releasing the old reference. */
  __Pyx_GOTREF(__pyx_v___pyx_result->name);
  __Pyx_DECREF(__pyx_v___pyx_result->name);
  __pyx_v___pyx_result->name = __pyx_t_1;
  __pyx_t_1 = 0;
  /* "(tree fragment)":13
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):
 *     __pyx_result.name = __pyx_state[0]
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):             # <<<<<<<<<<<<<<
 *         __pyx_result.__dict__.update(__pyx_state[1])
 */
  if (unlikely(__pyx_v___pyx_state == Py_None)) {
    PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()");
    __PYX_ERR(2, 13, __pyx_L1_error)
  }
  __pyx_t_3 = PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(2, 13, __pyx_L1_error)
  /* Short-circuit `and`: only probe hasattr() when the tuple has a 2nd item. */
  __pyx_t_4 = ((__pyx_t_3 > 1) != 0);
  if (__pyx_t_4) {
  } else {
    __pyx_t_2 = __pyx_t_4;
    goto __pyx_L4_bool_binop_done;
  }
  __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 13, __pyx_L1_error)
  __pyx_t_5 = (__pyx_t_4 != 0);
  __pyx_t_2 = __pyx_t_5;
  __pyx_L4_bool_binop_done:;
  if (__pyx_t_2) {
    /* "(tree fragment)":14
 *     __pyx_result.name = __pyx_state[0]
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
 *         __pyx_result.__dict__.update(__pyx_state[1])             # <<<<<<<<<<<<<<
 */
    __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_6);
    __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(2, 14, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_7);
    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
    if (unlikely(__pyx_v___pyx_state == Py_None)) {
      PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable");
      __PYX_ERR(2, 14, __pyx_L1_error)
    }
    __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(2, 14, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_6);
    __pyx_t_8 = NULL;
    /* Bound-method unpacking for the fast 2-arg call of dict.update. */
    if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) {
      __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7);
      if (likely(__pyx_t_8)) {
        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
        __Pyx_INCREF(__pyx_t_8);
        __Pyx_INCREF(function);
        __Pyx_DECREF_SET(__pyx_t_7, function);
      }
    }
    __pyx_t_1 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6);
    __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
    if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 14, __pyx_L1_error)
    __Pyx_GOTREF(__pyx_t_1);
    __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
    /* "(tree fragment)":13
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):
 *     __pyx_result.name = __pyx_state[0]
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):             # <<<<<<<<<<<<<<
 *         __pyx_result.__dict__.update(__pyx_state[1])
 */
  }
  /* "(tree fragment)":11
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 *     return __pyx_result
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):             # <<<<<<<<<<<<<<
 *     __pyx_result.name = __pyx_state[0]
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
 */
  /* function exit code */
  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_6);
  __Pyx_XDECREF(__pyx_t_7);
  __Pyx_XDECREF(__pyx_t_8);
  __Pyx_AddTraceback("View.MemoryView.__pyx_unpickle_Enum__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename);
  __pyx_r = 0;
  __pyx_L0:;
  __Pyx_XGIVEREF(__pyx_r);
  __Pyx_RefNannyFinishContext();
  return __pyx_r;
}
/* Static vtable instance for the `array` extension type (cdef methods). */
static struct __pyx_vtabstruct_array __pyx_vtable_array;
/* tp_new for the `array` extension type.  Allocates the instance (via the
 * base-object path for abstract subtypes), wires up the cdef-method vtable,
 * initialises the object-typed fields to None, and runs __cinit__ with the
 * original (args, kwargs).  Returns NULL on failure. */
static PyObject *__pyx_tp_new_array(PyTypeObject *t, PyObject *a, PyObject *k) {
  struct __pyx_array_obj *p;
  PyObject *o;
  if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
    o = (*t->tp_alloc)(t, 0);
  } else {
    /* Abstract subtype: let object.__new__ raise the standard error. */
    o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
  }
  if (unlikely(!o)) return 0;
  p = ((struct __pyx_array_obj *)o);
  p->__pyx_vtab = __pyx_vtabptr_array;
  /* Object slots must hold valid references before __cinit__ may fail. */
  p->mode = ((PyObject*)Py_None); Py_INCREF(Py_None);
  p->_format = ((PyObject*)Py_None); Py_INCREF(Py_None);
  if (unlikely(__pyx_array___cinit__(o, a, k) < 0)) goto bad;
  return o;
  bad:
  Py_DECREF(o); o = 0;
  return NULL;
}
/* tp_dealloc for `array`: runs tp_finalize once (PEP 442) if defined, calls
 * the user __dealloc__ with any pending exception saved/restored, releases
 * the object-typed fields, then frees the memory. */
static void __pyx_tp_dealloc_array(PyObject *o) {
  struct __pyx_array_obj *p = (struct __pyx_array_obj *)o;
  #if CYTHON_USE_TP_FINALIZE
  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && (!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))) {
    if (PyObject_CallFinalizerFromDealloc(o)) return;
  }
  #endif
  {
    /* __dealloc__ may run arbitrary Python code; preserve any in-flight
     * exception across the call. */
    PyObject *etype, *eval, *etb;
    PyErr_Fetch(&etype, &eval, &etb);
    /* Temporary resurrection so __dealloc__ cannot re-enter dealloc.
     * NOTE(review): Py_REFCNT(o) is not an lvalue on CPython >= 3.10; this
     * generated code predates Py_SET_REFCNT — regenerate with a newer Cython
     * before targeting 3.10+. */
    ++Py_REFCNT(o);
    __pyx_array___dealloc__(o);
    --Py_REFCNT(o);
    PyErr_Restore(etype, eval, etb);
  }
  Py_CLEAR(p->mode);
  Py_CLEAR(p->_format);
  (*Py_TYPE(o)->tp_free)(o);
}
/* sq_item for `array`: implements the sequence-protocol integer index by
 * boxing the index and delegating to the mapping-protocol subscript slot. */
static PyObject *__pyx_sq_item_array(PyObject *o, Py_ssize_t i) {
  PyObject *boxed_index;
  PyObject *result;
  boxed_index = PyInt_FromSsize_t(i);
  if (!boxed_index)
    return 0;
  result = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, boxed_index);
  Py_DECREF(boxed_index);
  return result;
}
/* mp_ass_subscript for `array`: forwards item assignment to __setitem__ and
 * rejects item deletion (v == NULL) with NotImplementedError. */
static int __pyx_mp_ass_subscript_array(PyObject *o, PyObject *i, PyObject *v) {
  if (!v) {
    PyErr_Format(PyExc_NotImplementedError,
      "Subscript deletion not supported by %.200s", Py_TYPE(o)->tp_name);
    return -1;
  }
  return __pyx_array___setitem__(o, i, v);
}
/* tp_getattro for `array`: try generic attribute lookup first, and only on
 * AttributeError clear the error and fall back to the user __getattr__. */
static PyObject *__pyx_tp_getattro_array(PyObject *o, PyObject *n) {
  PyObject *attr = __Pyx_PyObject_GenericGetAttr(o, n);
  if (attr == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) {
    PyErr_Clear();
    attr = __pyx_array___getattr__(o, n);
  }
  return attr;
}
/* Getter thunk for the `array.memview` property; delegates to the generated
 * property implementation. */
static PyObject *__pyx_getprop___pyx_array_memview(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_5array_7memview_1__get__(o);
}
/* Method table for `array`.  __getattr__ is exposed with METH_COEXIST so the
 * explicit entry wins over the slot wrapper. */
static PyMethodDef __pyx_methods_array[] = {
  {"__getattr__", (PyCFunction)__pyx_array___getattr__, METH_O|METH_COEXIST, 0},
  {"__reduce_cython__", (PyCFunction)__pyx_pw___pyx_array_1__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw___pyx_array_3__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Property table for `array` (read-only `memview`). */
static struct PyGetSetDef __pyx_getsets_array[] = {
  {(char *)"memview", __pyx_getprop___pyx_array_memview, 0, (char *)0, 0},
  {0, 0, 0, 0, 0}
};
/* Sequence protocol: only len() and integer indexing are supported. */
static PySequenceMethods __pyx_tp_as_sequence_array = {
  __pyx_array___len__, /*sq_length*/
  0, /*sq_concat*/
  0, /*sq_repeat*/
  __pyx_sq_item_array, /*sq_item*/
  0, /*sq_slice*/
  0, /*sq_ass_item*/
  0, /*sq_ass_slice*/
  0, /*sq_contains*/
  0, /*sq_inplace_concat*/
  0, /*sq_inplace_repeat*/
};
/* Mapping protocol: len(), obj[key] and obj[key] = value. */
static PyMappingMethods __pyx_tp_as_mapping_array = {
  __pyx_array___len__, /*mp_length*/
  __pyx_array___getitem__, /*mp_subscript*/
  __pyx_mp_ass_subscript_array, /*mp_ass_subscript*/
};
/* Buffer protocol: new-style getbuffer only; Python-2 legacy slots zeroed. */
static PyBufferProcs __pyx_tp_as_buffer_array = {
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getreadbuffer*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getwritebuffer*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getsegcount*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getcharbuffer*/
  #endif
  __pyx_array_getbuffer, /*bf_getbuffer*/
  0, /*bf_releasebuffer*/
};
/* PyTypeObject for the internal Cython `array` type (backing buffer for
 * cython.view.array).  Non-GC type; exposes sequence/mapping/buffer
 * protocols and a custom getattro fallback defined above. */
static PyTypeObject __pyx_type___pyx_array = {
  PyVarObject_HEAD_INIT(0, 0)
  "msanomalydetector._anomaly_kernel_cython.array", /*tp_name*/
  sizeof(struct __pyx_array_obj), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_array, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  0, /*tp_repr*/
  0, /*tp_as_number*/
  &__pyx_tp_as_sequence_array, /*tp_as_sequence*/
  &__pyx_tp_as_mapping_array, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  0, /*tp_str*/
  __pyx_tp_getattro_array, /*tp_getattro*/
  0, /*tp_setattro*/
  &__pyx_tp_as_buffer_array, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/
  0, /*tp_doc*/
  0, /*tp_traverse*/
  0, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_array, /*tp_methods*/
  0, /*tp_members*/
  __pyx_getsets_array, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  0, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_array, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
};
/* tp_new for the memoryview `Enum` sentinel type: allocate the instance
 * (args/kwargs are unused here; __init__ consumes them) and initialise the
 * `name` slot to None so it always holds a valid reference. */
static PyObject *__pyx_tp_new_Enum(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) {
  PyObject *self;
  struct __pyx_MemviewEnum_obj *enum_obj;
  if (unlikely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) != 0)) {
    /* Abstract subtype: route through object.__new__ for the standard error. */
    self = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
  } else {
    self = (*t->tp_alloc)(t, 0);
  }
  if (unlikely(!self)) return 0;
  enum_obj = (struct __pyx_MemviewEnum_obj *)self;
  Py_INCREF(Py_None);
  enum_obj->name = Py_None;
  return self;
}
/* tp_dealloc for `Enum` (GC type): run tp_finalize once if present, untrack
 * from the collector before touching fields, release `name`, free. */
static void __pyx_tp_dealloc_Enum(PyObject *o) {
  struct __pyx_MemviewEnum_obj *p = (struct __pyx_MemviewEnum_obj *)o;
  #if CYTHON_USE_TP_FINALIZE
  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
    if (PyObject_CallFinalizerFromDealloc(o)) return;
  }
  #endif
  PyObject_GC_UnTrack(o);
  Py_CLEAR(p->name);
  (*Py_TYPE(o)->tp_free)(o);
}
/* tp_traverse for `Enum`: report the single owned reference (`name`) to the
 * garbage collector. */
static int __pyx_tp_traverse_Enum(PyObject *o, visitproc v, void *a) {
  struct __pyx_MemviewEnum_obj *self = (struct __pyx_MemviewEnum_obj *)o;
  if (self->name) {
    int err = (*v)(self->name, a);
    if (err)
      return err;
  }
  return 0;
}
/* tp_clear for `Enum`: break the reference cycle by swapping `name` to None
 * before dropping the old reference (keeps the slot always valid). */
static int __pyx_tp_clear_Enum(PyObject *o) {
  struct __pyx_MemviewEnum_obj *self = (struct __pyx_MemviewEnum_obj *)o;
  PyObject *old_name = (PyObject *)self->name;
  Py_INCREF(Py_None);
  self->name = Py_None;
  Py_XDECREF(old_name);
  return 0;
}
/* Method table for `Enum`: pickling hooks only. */
static PyMethodDef __pyx_methods_Enum[] = {
  {"__reduce_cython__", (PyCFunction)__pyx_pw___pyx_MemviewEnum_1__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw___pyx_MemviewEnum_3__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* PyTypeObject for the `Enum` sentinel type (GC-enabled: participates in
 * traverse/clear; has repr, init, and the pickling methods above). */
static PyTypeObject __pyx_type___pyx_MemviewEnum = {
  PyVarObject_HEAD_INIT(0, 0)
  "msanomalydetector._anomaly_kernel_cython.Enum", /*tp_name*/
  sizeof(struct __pyx_MemviewEnum_obj), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_Enum, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  __pyx_MemviewEnum___repr__, /*tp_repr*/
  0, /*tp_as_number*/
  0, /*tp_as_sequence*/
  0, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  0, /*tp_str*/
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  0, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  0, /*tp_doc*/
  __pyx_tp_traverse_Enum, /*tp_traverse*/
  __pyx_tp_clear_Enum, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_Enum, /*tp_methods*/
  0, /*tp_members*/
  0, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  __pyx_MemviewEnum___init__, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_Enum, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
};
/* Static vtable instance for the `memoryview` extension type. */
static struct __pyx_vtabstruct_memoryview __pyx_vtable_memoryview;
/* tp_new for `memoryview`: allocate, install the vtable, default the three
 * object fields to None and the Py_buffer owner to NULL, then run __cinit__
 * with the original (args, kwargs).  Returns NULL on failure. */
static PyObject *__pyx_tp_new_memoryview(PyTypeObject *t, PyObject *a, PyObject *k) {
  struct __pyx_memoryview_obj *p;
  PyObject *o;
  if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
    o = (*t->tp_alloc)(t, 0);
  } else {
    o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
  }
  if (unlikely(!o)) return 0;
  p = ((struct __pyx_memoryview_obj *)o);
  p->__pyx_vtab = __pyx_vtabptr_memoryview;
  p->obj = Py_None; Py_INCREF(Py_None);
  p->_size = Py_None; Py_INCREF(Py_None);
  p->_array_interface = Py_None; Py_INCREF(Py_None);
  /* view.obj is a raw Py_buffer owner, not a defaulted-to-None slot. */
  p->view.obj = NULL;
  if (unlikely(__pyx_memoryview___cinit__(o, a, k) < 0)) goto bad;
  return o;
  bad:
  Py_DECREF(o); o = 0;
  return NULL;
}
/* tp_dealloc for `memoryview` (GC type): finalize once if defined, untrack,
 * run the user __dealloc__ (which may execute Python code) with any pending
 * exception preserved, release the object fields, free. */
static void __pyx_tp_dealloc_memoryview(PyObject *o) {
  struct __pyx_memoryview_obj *p = (struct __pyx_memoryview_obj *)o;
  #if CYTHON_USE_TP_FINALIZE
  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
    if (PyObject_CallFinalizerFromDealloc(o)) return;
  }
  #endif
  PyObject_GC_UnTrack(o);
  {
    PyObject *etype, *eval, *etb;
    PyErr_Fetch(&etype, &eval, &etb);
    /* Temporary resurrection guard (see NOTE on __pyx_tp_dealloc_array about
     * Py_REFCNT lvalue use on CPython >= 3.10). */
    ++Py_REFCNT(o);
    __pyx_memoryview___dealloc__(o);
    --Py_REFCNT(o);
    PyErr_Restore(etype, eval, etb);
  }
  Py_CLEAR(p->obj);
  Py_CLEAR(p->_size);
  Py_CLEAR(p->_array_interface);
  (*Py_TYPE(o)->tp_free)(o);
}
/* tp_traverse for `memoryview`: report the four owned references — obj,
 * _size, _array_interface, and the Py_buffer owner view.obj — in that order. */
static int __pyx_tp_traverse_memoryview(PyObject *o, visitproc v, void *a) {
  struct __pyx_memoryview_obj *self = (struct __pyx_memoryview_obj *)o;
  PyObject *owned[4];
  int idx;
  owned[0] = self->obj;
  owned[1] = self->_size;
  owned[2] = self->_array_interface;
  owned[3] = self->view.obj;
  for (idx = 0; idx < 4; idx++) {
    if (owned[idx]) {
      int err = (*v)(owned[idx], a);
      if (err)
        return err;
    }
  }
  return 0;
}
/* tp_clear for `memoryview`: break cycles by resetting each object slot to
 * None (slots stay valid) and dropping the Py_buffer owner reference. */
static int __pyx_tp_clear_memoryview(PyObject *o) {
  struct __pyx_memoryview_obj *self = (struct __pyx_memoryview_obj *)o;
  PyObject *released;

  released = self->obj;
  Py_INCREF(Py_None);
  self->obj = Py_None;
  Py_XDECREF(released);

  released = self->_size;
  Py_INCREF(Py_None);
  self->_size = Py_None;
  Py_XDECREF(released);

  released = self->_array_interface;
  Py_INCREF(Py_None);
  self->_array_interface = Py_None;
  Py_XDECREF(released);

  Py_CLEAR(self->view.obj);
  return 0;
}
/* sq_item for `memoryview`: box the Py_ssize_t index and route the lookup
 * through the mapping-protocol subscript slot. */
static PyObject *__pyx_sq_item_memoryview(PyObject *o, Py_ssize_t i) {
  PyObject *boxed_index;
  PyObject *result;
  boxed_index = PyInt_FromSsize_t(i);
  if (!boxed_index)
    return 0;
  result = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, boxed_index);
  Py_DECREF(boxed_index);
  return result;
}
/* mp_ass_subscript for `memoryview`: forward assignment to __setitem__;
 * deletion (v == NULL) is rejected with NotImplementedError. */
static int __pyx_mp_ass_subscript_memoryview(PyObject *o, PyObject *i, PyObject *v) {
  if (!v) {
    PyErr_Format(PyExc_NotImplementedError,
      "Subscript deletion not supported by %.200s", Py_TYPE(o)->tp_name);
    return -1;
  }
  return __pyx_memoryview___setitem__(o, i, v);
}
/* Property getter thunks for `memoryview`: each forwards to the generated
 * __get__ implementation for the corresponding read-only attribute. */
static PyObject *__pyx_getprop___pyx_memoryview_T(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_1T_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_base(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_4base_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_shape(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_5shape_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_strides(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_7strides_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_suboffsets(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_10suboffsets_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_ndim(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_4ndim_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_itemsize(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_8itemsize_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_nbytes(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_6nbytes_1__get__(o);
}
static PyObject *__pyx_getprop___pyx_memoryview_size(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_10memoryview_4size_1__get__(o);
}
/* Method table for `memoryview`: contiguity queries, copies, pickling hooks. */
static PyMethodDef __pyx_methods_memoryview[] = {
  {"is_c_contig", (PyCFunction)__pyx_memoryview_is_c_contig, METH_NOARGS, 0},
  {"is_f_contig", (PyCFunction)__pyx_memoryview_is_f_contig, METH_NOARGS, 0},
  {"copy", (PyCFunction)__pyx_memoryview_copy, METH_NOARGS, 0},
  {"copy_fortran", (PyCFunction)__pyx_memoryview_copy_fortran, METH_NOARGS, 0},
  {"__reduce_cython__", (PyCFunction)__pyx_pw___pyx_memoryview_1__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw___pyx_memoryview_3__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Property table: the read-only attributes served by the thunks above. */
static struct PyGetSetDef __pyx_getsets_memoryview[] = {
  {(char *)"T", __pyx_getprop___pyx_memoryview_T, 0, (char *)0, 0},
  {(char *)"base", __pyx_getprop___pyx_memoryview_base, 0, (char *)0, 0},
  {(char *)"shape", __pyx_getprop___pyx_memoryview_shape, 0, (char *)0, 0},
  {(char *)"strides", __pyx_getprop___pyx_memoryview_strides, 0, (char *)0, 0},
  {(char *)"suboffsets", __pyx_getprop___pyx_memoryview_suboffsets, 0, (char *)0, 0},
  {(char *)"ndim", __pyx_getprop___pyx_memoryview_ndim, 0, (char *)0, 0},
  {(char *)"itemsize", __pyx_getprop___pyx_memoryview_itemsize, 0, (char *)0, 0},
  {(char *)"nbytes", __pyx_getprop___pyx_memoryview_nbytes, 0, (char *)0, 0},
  {(char *)"size", __pyx_getprop___pyx_memoryview_size, 0, (char *)0, 0},
  {0, 0, 0, 0, 0}
};
/* Sequence protocol: len() and integer indexing only. */
static PySequenceMethods __pyx_tp_as_sequence_memoryview = {
  __pyx_memoryview___len__, /*sq_length*/
  0, /*sq_concat*/
  0, /*sq_repeat*/
  __pyx_sq_item_memoryview, /*sq_item*/
  0, /*sq_slice*/
  0, /*sq_ass_item*/
  0, /*sq_ass_slice*/
  0, /*sq_contains*/
  0, /*sq_inplace_concat*/
  0, /*sq_inplace_repeat*/
};
/* Mapping protocol: len(), slicing/indexing, and item assignment. */
static PyMappingMethods __pyx_tp_as_mapping_memoryview = {
  __pyx_memoryview___len__, /*mp_length*/
  __pyx_memoryview___getitem__, /*mp_subscript*/
  __pyx_mp_ass_subscript_memoryview, /*mp_ass_subscript*/
};
/* Buffer protocol: new-style getbuffer only; Python-2 legacy slots zeroed. */
static PyBufferProcs __pyx_tp_as_buffer_memoryview = {
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getreadbuffer*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getwritebuffer*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getsegcount*/
  #endif
  #if PY_MAJOR_VERSION < 3
  0, /*bf_getcharbuffer*/
  #endif
  __pyx_memoryview_getbuffer, /*bf_getbuffer*/
  0, /*bf_releasebuffer*/
};
/* PyTypeObject for Cython's internal `memoryview` type (GC-enabled; exposes
 * sequence/mapping/buffer protocols, repr/str, and the tables above). */
static PyTypeObject __pyx_type___pyx_memoryview = {
  PyVarObject_HEAD_INIT(0, 0)
  "msanomalydetector._anomaly_kernel_cython.memoryview", /*tp_name*/
  sizeof(struct __pyx_memoryview_obj), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc_memoryview, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  __pyx_memoryview___repr__, /*tp_repr*/
  0, /*tp_as_number*/
  &__pyx_tp_as_sequence_memoryview, /*tp_as_sequence*/
  &__pyx_tp_as_mapping_memoryview, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  __pyx_memoryview___str__, /*tp_str*/
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  &__pyx_tp_as_buffer_memoryview, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  0, /*tp_doc*/
  __pyx_tp_traverse_memoryview, /*tp_traverse*/
  __pyx_tp_clear_memoryview, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods_memoryview, /*tp_methods*/
  0, /*tp_members*/
  __pyx_getsets_memoryview, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  0, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new_memoryview, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
};
/* Static vtable instance for the `_memoryviewslice` subtype. */
static struct __pyx_vtabstruct__memoryviewslice __pyx_vtable__memoryviewslice;
/* tp_new for `_memoryviewslice`: delegates allocation/__cinit__ to the base
 * memoryview tp_new, then overrides the vtable with the subtype's and
 * initialises the subtype-specific fields. */
static PyObject *__pyx_tp_new__memoryviewslice(PyTypeObject *t, PyObject *a, PyObject *k) {
  struct __pyx_memoryviewslice_obj *p;
  PyObject *o = __pyx_tp_new_memoryview(t, a, k);
  if (unlikely(!o)) return 0;
  p = ((struct __pyx_memoryviewslice_obj *)o);
  p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_memoryview*)__pyx_vtabptr__memoryviewslice;
  p->from_object = Py_None; Py_INCREF(Py_None);
  p->from_slice.memview = NULL;
  return o;
}
/* tp_dealloc for `_memoryviewslice`: finalize once if defined, untrack, run
 * the user __dealloc__ with exception state preserved, release from_object,
 * then RE-track before delegating to the base dealloc (which untracks again
 * itself and releases the base fields / frees the memory). */
static void __pyx_tp_dealloc__memoryviewslice(PyObject *o) {
  struct __pyx_memoryviewslice_obj *p = (struct __pyx_memoryviewslice_obj *)o;
  #if CYTHON_USE_TP_FINALIZE
  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
    if (PyObject_CallFinalizerFromDealloc(o)) return;
  }
  #endif
  PyObject_GC_UnTrack(o);
  {
    PyObject *etype, *eval, *etb;
    PyErr_Fetch(&etype, &eval, &etb);
    /* Temporary resurrection guard (see NOTE on __pyx_tp_dealloc_array about
     * Py_REFCNT lvalue use on CPython >= 3.10). */
    ++Py_REFCNT(o);
    __pyx_memoryviewslice___dealloc__(o);
    --Py_REFCNT(o);
    PyErr_Restore(etype, eval, etb);
  }
  Py_CLEAR(p->from_object);
  PyObject_GC_Track(o);
  __pyx_tp_dealloc_memoryview(o);
}
/* tp_traverse for `_memoryviewslice`: visit the base memoryview's owned
 * references first, then the subtype's `from_object`. */
static int __pyx_tp_traverse__memoryviewslice(PyObject *o, visitproc v, void *a) {
  struct __pyx_memoryviewslice_obj *self = (struct __pyx_memoryviewslice_obj *)o;
  int err = __pyx_tp_traverse_memoryview(o, v, a);
  if (err)
    return err;
  if (self->from_object) {
    err = (*v)(self->from_object, a);
    if (err)
      return err;
  }
  return 0;
}
/* tp_clear for `_memoryviewslice`: clear the base fields, reset from_object
 * to None, and drop the slice's memview reference (have_gil = 1). */
static int __pyx_tp_clear__memoryviewslice(PyObject *o) {
  PyObject* tmp;
  struct __pyx_memoryviewslice_obj *p = (struct __pyx_memoryviewslice_obj *)o;
  __pyx_tp_clear_memoryview(o);
  tmp = ((PyObject*)p->from_object);
  p->from_object = Py_None; Py_INCREF(Py_None);
  Py_XDECREF(tmp);
  __PYX_XDEC_MEMVIEW(&p->from_slice, 1);
  return 0;
}
/* Getter thunk for `_memoryviewslice.base` (overrides the base property). */
static PyObject *__pyx_getprop___pyx_memoryviewslice_base(PyObject *o, CYTHON_UNUSED void *x) {
  return __pyx_pw_15View_dot_MemoryView_16_memoryviewslice_4base_1__get__(o);
}
/* Method table for `_memoryviewslice`: pickling hooks only. */
static PyMethodDef __pyx_methods__memoryviewslice[] = {
  {"__reduce_cython__", (PyCFunction)__pyx_pw___pyx_memoryviewslice_1__reduce_cython__, METH_NOARGS, 0},
  {"__setstate_cython__", (PyCFunction)__pyx_pw___pyx_memoryviewslice_3__setstate_cython__, METH_O, 0},
  {0, 0, 0, 0}
};
/* Property table: read-only `base`. */
static struct PyGetSetDef __pyx_getsets__memoryviewslice[] = {
  {(char *)"base", __pyx_getprop___pyx_memoryviewslice_base, 0, (char *)0, 0},
  {0, 0, 0, 0, 0}
};
/* PyTypeObject for `_memoryviewslice`, the subtype returned when slicing a
 * memoryview.  tp_base is filled in at module init; under PyPy the base
 * repr/str slots are installed explicitly since slot inheritance differs. */
static PyTypeObject __pyx_type___pyx_memoryviewslice = {
  PyVarObject_HEAD_INIT(0, 0)
  "msanomalydetector._anomaly_kernel_cython._memoryviewslice", /*tp_name*/
  sizeof(struct __pyx_memoryviewslice_obj), /*tp_basicsize*/
  0, /*tp_itemsize*/
  __pyx_tp_dealloc__memoryviewslice, /*tp_dealloc*/
  #if PY_VERSION_HEX < 0x030800b4
  0, /*tp_print*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4
  0, /*tp_vectorcall_offset*/
  #endif
  0, /*tp_getattr*/
  0, /*tp_setattr*/
  #if PY_MAJOR_VERSION < 3
  0, /*tp_compare*/
  #endif
  #if PY_MAJOR_VERSION >= 3
  0, /*tp_as_async*/
  #endif
  #if CYTHON_COMPILING_IN_PYPY
  __pyx_memoryview___repr__, /*tp_repr*/
  #else
  0, /*tp_repr*/
  #endif
  0, /*tp_as_number*/
  0, /*tp_as_sequence*/
  0, /*tp_as_mapping*/
  0, /*tp_hash*/
  0, /*tp_call*/
  #if CYTHON_COMPILING_IN_PYPY
  __pyx_memoryview___str__, /*tp_str*/
  #else
  0, /*tp_str*/
  #endif
  0, /*tp_getattro*/
  0, /*tp_setattro*/
  0, /*tp_as_buffer*/
  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
  "Internal class for passing memoryview slices to Python", /*tp_doc*/
  __pyx_tp_traverse__memoryviewslice, /*tp_traverse*/
  __pyx_tp_clear__memoryviewslice, /*tp_clear*/
  0, /*tp_richcompare*/
  0, /*tp_weaklistoffset*/
  0, /*tp_iter*/
  0, /*tp_iternext*/
  __pyx_methods__memoryviewslice, /*tp_methods*/
  0, /*tp_members*/
  __pyx_getsets__memoryviewslice, /*tp_getset*/
  0, /*tp_base*/
  0, /*tp_dict*/
  0, /*tp_descr_get*/
  0, /*tp_descr_set*/
  0, /*tp_dictoffset*/
  0, /*tp_init*/
  0, /*tp_alloc*/
  __pyx_tp_new__memoryviewslice, /*tp_new*/
  0, /*tp_free*/
  0, /*tp_is_gc*/
  0, /*tp_bases*/
  0, /*tp_mro*/
  0, /*tp_cache*/
  0, /*tp_subclasses*/
  0, /*tp_weaklist*/
  0, /*tp_del*/
  0, /*tp_version_tag*/
  #if PY_VERSION_HEX >= 0x030400a1
  0, /*tp_finalize*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b1
  0, /*tp_vectorcall*/
  #endif
  #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
  0, /*tp_print*/
  #endif
};
/* Module-level method table: the two public functions of the extension,
 * `sorted_median` and `median_filter`, both accepting positional and
 * keyword arguments. Terminated by a NULL sentinel entry. */
static PyMethodDef __pyx_methods[] = {
{"sorted_median", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_1sorted_median, METH_VARARGS|METH_KEYWORDS, 0},
{"median_filter", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_17msanomalydetector_22_anomaly_kernel_cython_3median_filter, METH_VARARGS|METH_KEYWORDS, 0},
{0, 0, 0, 0}
};
/* Python-3-only module definition. When multi-phase initialization
 * (PEP 489, CYTHON_PEP489_MULTI_PHASE_INIT) is enabled the module is
 * created/executed via the slot array below and m_size is 0; otherwise
 * classic single-phase init is used with m_size = -1 (no per-module
 * state). */
#if PY_MAJOR_VERSION >= 3
#if CYTHON_PEP489_MULTI_PHASE_INIT
static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
static int __pyx_pymod_exec__anomaly_kernel_cython(PyObject* module); /*proto*/
static PyModuleDef_Slot __pyx_moduledef_slots[] = {
{Py_mod_create, (void*)__pyx_pymod_create},
{Py_mod_exec, (void*)__pyx_pymod_exec__anomaly_kernel_cython},
{0, NULL}
};
#endif
static struct PyModuleDef __pyx_moduledef = {
PyModuleDef_HEAD_INIT,
"_anomaly_kernel_cython",
0, /* m_doc */
#if CYTHON_PEP489_MULTI_PHASE_INIT
0, /* m_size */
#else
-1, /* m_size */
#endif
__pyx_methods /* m_methods */,
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_moduledef_slots, /* m_slots */
#else
NULL, /* m_reload */
#endif
NULL, /* m_traverse */
NULL, /* m_clear */
NULL /* m_free */
};
#endif
/* CYTHON_SMALL_CODE marks rarely-executed init helpers. On GCC >= 4.3 it
 * expands to __attribute__((cold)) so the compiler optimizes those
 * functions for size; on clang and other compilers it expands to nothing. */
#ifndef CYTHON_SMALL_CODE
#if defined(__clang__)
#define CYTHON_SMALL_CODE
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
#define CYTHON_SMALL_CODE __attribute__((cold))
#else
#define CYTHON_SMALL_CODE
#endif
#endif
/* Table of every string constant the module uses. __Pyx_InitStrings()
 * (called from __Pyx_InitGlobals below) walks this table and fills each
 * `__pyx_n_*` / `__pyx_kp_*` global with a Python object created from the
 * C literal and its length. The trailing integer fields are flags
 * consumed by the __Pyx_StringTabEntry machinery — presumably selecting
 * bytes/str/unicode representation and interning; exact field semantics
 * are defined elsewhere in this generated file (not visible here). */
static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_ASCII, __pyx_k_ASCII, sizeof(__pyx_k_ASCII), 0, 0, 1, 1},
{&__pyx_kp_s_Buffer_view_does_not_expose_stri, __pyx_k_Buffer_view_does_not_expose_stri, sizeof(__pyx_k_Buffer_view_does_not_expose_stri), 0, 0, 1, 0},
{&__pyx_kp_s_Can_only_create_a_buffer_that_is, __pyx_k_Can_only_create_a_buffer_that_is, sizeof(__pyx_k_Can_only_create_a_buffer_that_is), 0, 0, 1, 0},
{&__pyx_kp_s_Cannot_assign_to_read_only_memor, __pyx_k_Cannot_assign_to_read_only_memor, sizeof(__pyx_k_Cannot_assign_to_read_only_memor), 0, 0, 1, 0},
{&__pyx_kp_s_Cannot_create_writable_memory_vi, __pyx_k_Cannot_create_writable_memory_vi, sizeof(__pyx_k_Cannot_create_writable_memory_vi), 0, 0, 1, 0},
{&__pyx_kp_s_Cannot_index_with_type_s, __pyx_k_Cannot_index_with_type_s, sizeof(__pyx_k_Cannot_index_with_type_s), 0, 0, 1, 0},
{&__pyx_n_s_Ellipsis, __pyx_k_Ellipsis, sizeof(__pyx_k_Ellipsis), 0, 0, 1, 1},
{&__pyx_kp_s_Empty_shape_tuple_for_cython_arr, __pyx_k_Empty_shape_tuple_for_cython_arr, sizeof(__pyx_k_Empty_shape_tuple_for_cython_arr), 0, 0, 1, 0},
{&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0},
{&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0},
{&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1},
{&__pyx_kp_s_Incompatible_checksums_s_vs_0xb0, __pyx_k_Incompatible_checksums_s_vs_0xb0, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xb0), 0, 0, 1, 0},
{&__pyx_n_s_IndexError, __pyx_k_IndexError, sizeof(__pyx_k_IndexError), 0, 0, 1, 1},
{&__pyx_kp_s_Indirect_dimensions_not_supporte, __pyx_k_Indirect_dimensions_not_supporte, sizeof(__pyx_k_Indirect_dimensions_not_supporte), 0, 0, 1, 0},
{&__pyx_kp_s_Invalid_mode_expected_c_or_fortr, __pyx_k_Invalid_mode_expected_c_or_fortr, sizeof(__pyx_k_Invalid_mode_expected_c_or_fortr), 0, 0, 1, 0},
{&__pyx_kp_s_Invalid_shape_in_axis_d_d, __pyx_k_Invalid_shape_in_axis_d_d, sizeof(__pyx_k_Invalid_shape_in_axis_d_d), 0, 0, 1, 0},
{&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1},
{&__pyx_kp_s_MemoryView_of_r_at_0x_x, __pyx_k_MemoryView_of_r_at_0x_x, sizeof(__pyx_k_MemoryView_of_r_at_0x_x), 0, 0, 1, 0},
{&__pyx_kp_s_MemoryView_of_r_object, __pyx_k_MemoryView_of_r_object, sizeof(__pyx_k_MemoryView_of_r_object), 0, 0, 1, 0},
{&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0},
{&__pyx_n_b_O, __pyx_k_O, sizeof(__pyx_k_O), 0, 0, 0, 1},
{&__pyx_kp_s_Out_of_bounds_on_buffer_access_a, __pyx_k_Out_of_bounds_on_buffer_access_a, sizeof(__pyx_k_Out_of_bounds_on_buffer_access_a), 0, 0, 1, 0},
{&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1},
{&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1},
{&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},
{&__pyx_kp_s_Unable_to_convert_item_to_object, __pyx_k_Unable_to_convert_item_to_object, sizeof(__pyx_k_Unable_to_convert_item_to_object), 0, 0, 1, 0},
{&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1},
{&__pyx_n_s_View_MemoryView, __pyx_k_View_MemoryView, sizeof(__pyx_k_View_MemoryView), 0, 0, 1, 1},
{&__pyx_n_s_allocate_buffer, __pyx_k_allocate_buffer, sizeof(__pyx_k_allocate_buffer), 0, 0, 1, 1},
{&__pyx_n_s_array, __pyx_k_array, sizeof(__pyx_k_array), 0, 0, 1, 1},
{&__pyx_n_s_base, __pyx_k_base, sizeof(__pyx_k_base), 0, 0, 1, 1},
{&__pyx_n_s_bisect, __pyx_k_bisect, sizeof(__pyx_k_bisect), 0, 0, 1, 1},
{&__pyx_n_s_bisect_right, __pyx_k_bisect_right, sizeof(__pyx_k_bisect_right), 0, 0, 1, 1},
{&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1},
{&__pyx_n_u_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 1, 0, 1},
{&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1},
{&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
{&__pyx_kp_s_contiguous_and_direct, __pyx_k_contiguous_and_direct, sizeof(__pyx_k_contiguous_and_direct), 0, 0, 1, 0},
{&__pyx_kp_s_contiguous_and_indirect, __pyx_k_contiguous_and_indirect, sizeof(__pyx_k_contiguous_and_indirect), 0, 0, 1, 0},
{&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1},
{&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1},
{&__pyx_n_s_dtype_is_object, __pyx_k_dtype_is_object, sizeof(__pyx_k_dtype_is_object), 0, 0, 1, 1},
{&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1},
{&__pyx_n_s_enumerate, __pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 0, 1, 1},
{&__pyx_n_s_error, __pyx_k_error, sizeof(__pyx_k_error), 0, 0, 1, 1},
{&__pyx_n_s_f, __pyx_k_f, sizeof(__pyx_k_f), 0, 0, 1, 1},
{&__pyx_n_s_flags, __pyx_k_flags, sizeof(__pyx_k_flags), 0, 0, 1, 1},
{&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1},
{&__pyx_n_s_fortran, __pyx_k_fortran, sizeof(__pyx_k_fortran), 0, 0, 1, 1},
{&__pyx_n_u_fortran, __pyx_k_fortran, sizeof(__pyx_k_fortran), 0, 1, 0, 1},
{&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1},
{&__pyx_kp_s_got_differing_extents_in_dimensi, __pyx_k_got_differing_extents_in_dimensi, sizeof(__pyx_k_got_differing_extents_in_dimensi), 0, 0, 1, 0},
{&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1},
{&__pyx_n_s_id, __pyx_k_id, sizeof(__pyx_k_id), 0, 0, 1, 1},
{&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},
{&__pyx_n_s_itemsize, __pyx_k_itemsize, sizeof(__pyx_k_itemsize), 0, 0, 1, 1},
{&__pyx_kp_s_itemsize_0_for_cython_array, __pyx_k_itemsize_0_for_cython_array, sizeof(__pyx_k_itemsize_0_for_cython_array), 0, 0, 1, 0},
{&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1},
{&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
{&__pyx_n_s_memview, __pyx_k_memview, sizeof(__pyx_k_memview), 0, 0, 1, 1},
{&__pyx_n_s_mode, __pyx_k_mode, sizeof(__pyx_k_mode), 0, 0, 1, 1},
{&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
{&__pyx_n_s_name_2, __pyx_k_name_2, sizeof(__pyx_k_name_2), 0, 0, 1, 1},
{&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0},
{&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0},
{&__pyx_n_s_ndim, __pyx_k_ndim, sizeof(__pyx_k_ndim), 0, 0, 1, 1},
{&__pyx_n_s_need_two_end, __pyx_k_need_two_end, sizeof(__pyx_k_need_two_end), 0, 0, 1, 1},
{&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1},
{&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0},
{&__pyx_kp_s_no_median_for_empty_data, __pyx_k_no_median_for_empty_data, sizeof(__pyx_k_no_median_for_empty_data), 0, 0, 1, 0},
{&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1},
{&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1},
{&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0},
{&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0},
{&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1},
{&__pyx_n_s_pack, __pyx_k_pack, sizeof(__pyx_k_pack), 0, 0, 1, 1},
{&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1},
{&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1},
{&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1},
{&__pyx_n_s_pyx_getbuffer, __pyx_k_pyx_getbuffer, sizeof(__pyx_k_pyx_getbuffer), 0, 0, 1, 1},
{&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1},
{&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1},
{&__pyx_n_s_pyx_type, __pyx_k_pyx_type, sizeof(__pyx_k_pyx_type), 0, 0, 1, 1},
{&__pyx_n_s_pyx_unpickle_Enum, __pyx_k_pyx_unpickle_Enum, sizeof(__pyx_k_pyx_unpickle_Enum), 0, 0, 1, 1},
{&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1},
{&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1},
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
{&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
{&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
{&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
{&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
{&__pyx_n_s_shape, __pyx_k_shape, sizeof(__pyx_k_shape), 0, 0, 1, 1},
{&__pyx_n_s_size, __pyx_k_size, sizeof(__pyx_k_size), 0, 0, 1, 1},
{&__pyx_n_s_start, __pyx_k_start, sizeof(__pyx_k_start), 0, 0, 1, 1},
{&__pyx_n_s_step, __pyx_k_step, sizeof(__pyx_k_step), 0, 0, 1, 1},
{&__pyx_n_s_stop, __pyx_k_stop, sizeof(__pyx_k_stop), 0, 0, 1, 1},
{&__pyx_kp_s_strided_and_direct, __pyx_k_strided_and_direct, sizeof(__pyx_k_strided_and_direct), 0, 0, 1, 0},
{&__pyx_kp_s_strided_and_direct_or_indirect, __pyx_k_strided_and_direct_or_indirect, sizeof(__pyx_k_strided_and_direct_or_indirect), 0, 0, 1, 0},
{&__pyx_kp_s_strided_and_indirect, __pyx_k_strided_and_indirect, sizeof(__pyx_k_strided_and_indirect), 0, 0, 1, 0},
{&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0},
{&__pyx_n_s_struct, __pyx_k_struct, sizeof(__pyx_k_struct), 0, 0, 1, 1},
{&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
{&__pyx_kp_s_unable_to_allocate_array_data, __pyx_k_unable_to_allocate_array_data, sizeof(__pyx_k_unable_to_allocate_array_data), 0, 0, 1, 0},
{&__pyx_kp_s_unable_to_allocate_shape_and_str, __pyx_k_unable_to_allocate_shape_and_str, sizeof(__pyx_k_unable_to_allocate_shape_and_str), 0, 0, 1, 0},
{&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0},
{&__pyx_n_s_unpack, __pyx_k_unpack, sizeof(__pyx_k_unpack), 0, 0, 1, 1},
{&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1},
{&__pyx_n_s_window, __pyx_k_window, sizeof(__pyx_k_window), 0, 0, 1, 1},
{0, 0, 0, 0, 0, 0, 0}
};
/* Resolve and cache the Python builtins the module references (range,
 * ValueError, enumerate, ...) once at init time, so runtime code can use
 * the cached pointers instead of repeated name lookups. Returns 0 on
 * success, -1 on failure with a Python exception set. The __PYX_ERR
 * arguments appear to encode (source-file index, source line, error
 * label) for traceback reporting -- defined elsewhere in this file. */
static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 23, __pyx_L1_error)
__pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 272, __pyx_L1_error)
__pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 856, __pyx_L1_error)
__pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(1, 1038, __pyx_L1_error)
__pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(2, 148, __pyx_L1_error)
__pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(2, 151, __pyx_L1_error)
__pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(2, 2, __pyx_L1_error)
__pyx_builtin_Ellipsis = __Pyx_GetBuiltinName(__pyx_n_s_Ellipsis); if (!__pyx_builtin_Ellipsis) __PYX_ERR(2, 404, __pyx_L1_error)
__pyx_builtin_id = __Pyx_GetBuiltinName(__pyx_n_s_id); if (!__pyx_builtin_id) __PYX_ERR(2, 613, __pyx_L1_error)
__pyx_builtin_IndexError = __Pyx_GetBuiltinName(__pyx_n_s_IndexError); if (!__pyx_builtin_IndexError) __PYX_ERR(2, 832, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
}
/* Pre-build every constant object the module needs at runtime: mostly
 * one-element argument tuples for exception constructors, plus a cached
 * slice(None), the Enum label tuples, and the code object for
 * __pyx_unpickle_Enum. Building them once here lets the hot paths raise
 * and call without allocating. Returns 0 on success, -1 on failure with
 * a Python exception set. The block comments quoting .pyx/.pxd source
 * are emitted by Cython and locate the statement each constant serves. */
static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
/* "msanomalydetector/_anomaly_kernel_cython.pyx":11
 * cdef int mid
 * if n == 0:
 * raise Exception("no median for empty data") # <<<<<<<<<<<<<<
 * if n % 2 == 1:
 * return data[i + n // 2]
 */
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_no_median_for_empty_data); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple_);
__Pyx_GIVEREF(__pyx_tuple_);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":272
 * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)
 * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):
 * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<<
 *
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
 */
__pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 272, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__2);
__Pyx_GIVEREF(__pyx_tuple__2);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":276
 * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)
 * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):
 * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<<
 *
 * info.buf = PyArray_DATA(self)
 */
__pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 276, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__3);
__Pyx_GIVEREF(__pyx_tuple__3);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":306
 * if ((descr.byteorder == c'>' and little_endian) or
 * (descr.byteorder == c'<' and not little_endian)):
 * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<<
 * if t == NPY_BYTE: f = "b"
 * elif t == NPY_UBYTE: f = "B"
 */
__pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 306, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__4);
__Pyx_GIVEREF(__pyx_tuple__4);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":856
 *
 * if (end - f) - <int>(new_offset - offset[0]) < 15:
 * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<<
 *
 * if ((child.byteorder == c'>' and little_endian) or
 */
__pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 856, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__5);
__Pyx_GIVEREF(__pyx_tuple__5);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":880
 * t = child.type_num
 * if end - f < 5:
 * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<<
 *
 * # Until ticket #99 is fixed, use integers to avoid warnings
 */
__pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 880, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__6);
__Pyx_GIVEREF(__pyx_tuple__6);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1038
 * _import_array()
 * except Exception:
 * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<<
 *
 * cdef inline int import_umath() except -1:
 */
__pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 1038, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__7);
__Pyx_GIVEREF(__pyx_tuple__7);
/* "../../../Anaconda3/envs/test1/lib/site-packages/Cython/Includes/numpy/__init__.pxd":1044
 * _import_umath()
 * except Exception:
 * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<<
 *
 * cdef inline int import_ufunc() except -1:
 */
__pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 1044, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__8);
__Pyx_GIVEREF(__pyx_tuple__8);
/* "View.MemoryView":133
 *
 * if not self.ndim:
 * raise ValueError("Empty shape tuple for cython.array") # <<<<<<<<<<<<<<
 *
 * if itemsize <= 0:
 */
__pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_Empty_shape_tuple_for_cython_arr); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(2, 133, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__9);
__Pyx_GIVEREF(__pyx_tuple__9);
/* "View.MemoryView":136
 *
 * if itemsize <= 0:
 * raise ValueError("itemsize <= 0 for cython.array") # <<<<<<<<<<<<<<
 *
 * if not isinstance(format, bytes):
 */
__pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_s_itemsize_0_for_cython_array); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(2, 136, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__10);
__Pyx_GIVEREF(__pyx_tuple__10);
/* "View.MemoryView":148
 *
 * if not self._shape:
 * raise MemoryError("unable to allocate shape and strides.") # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_unable_to_allocate_shape_and_str); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(2, 148, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__11);
__Pyx_GIVEREF(__pyx_tuple__11);
/* "View.MemoryView":176
 * self.data = <char *>malloc(self.len)
 * if not self.data:
 * raise MemoryError("unable to allocate array data.") # <<<<<<<<<<<<<<
 *
 * if self.dtype_is_object:
 */
__pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_unable_to_allocate_array_data); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(2, 176, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__12);
__Pyx_GIVEREF(__pyx_tuple__12);
/* "View.MemoryView":192
 * bufmode = PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
 * if not (flags & bufmode):
 * raise ValueError("Can only create a buffer that is contiguous in memory.") # <<<<<<<<<<<<<<
 * info.buf = self.data
 * info.len = self.len
 */
__pyx_tuple__13 = PyTuple_Pack(1, __pyx_kp_s_Can_only_create_a_buffer_that_is); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(2, 192, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__13);
__Pyx_GIVEREF(__pyx_tuple__13);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
__pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__14);
__Pyx_GIVEREF(__pyx_tuple__14);
/* "(tree fragment)":4
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
__pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__15);
__Pyx_GIVEREF(__pyx_tuple__15);
/* "View.MemoryView":418
 * def __setitem__(memoryview self, object index, object value):
 * if self.view.readonly:
 * raise TypeError("Cannot assign to read-only memoryview") # <<<<<<<<<<<<<<
 *
 * have_slices, index = _unellipsify(index, self.view.ndim)
 */
__pyx_tuple__16 = PyTuple_Pack(1, __pyx_kp_s_Cannot_assign_to_read_only_memor); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(2, 418, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__16);
__Pyx_GIVEREF(__pyx_tuple__16);
/* "View.MemoryView":495
 * result = struct.unpack(self.view.format, bytesitem)
 * except struct.error:
 * raise ValueError("Unable to convert item to object") # <<<<<<<<<<<<<<
 * else:
 * if len(self.view.format) == 1:
 */
__pyx_tuple__17 = PyTuple_Pack(1, __pyx_kp_s_Unable_to_convert_item_to_object); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(2, 495, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__17);
__Pyx_GIVEREF(__pyx_tuple__17);
/* "View.MemoryView":520
 * def __getbuffer__(self, Py_buffer *info, int flags):
 * if flags & PyBUF_WRITABLE and self.view.readonly:
 * raise ValueError("Cannot create writable memory view from read-only memoryview") # <<<<<<<<<<<<<<
 *
 * if flags & PyBUF_ND:
 */
__pyx_tuple__18 = PyTuple_Pack(1, __pyx_kp_s_Cannot_create_writable_memory_vi); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(2, 520, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__18);
__Pyx_GIVEREF(__pyx_tuple__18);
/* "View.MemoryView":570
 * if self.view.strides == NULL:
 *
 * raise ValueError("Buffer view does not expose strides") # <<<<<<<<<<<<<<
 *
 * return tuple([stride for stride in self.view.strides[:self.view.ndim]])
 */
__pyx_tuple__19 = PyTuple_Pack(1, __pyx_kp_s_Buffer_view_does_not_expose_stri); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(2, 570, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__19);
__Pyx_GIVEREF(__pyx_tuple__19);
/* "View.MemoryView":577
 * def suboffsets(self):
 * if self.view.suboffsets == NULL:
 * return (-1,) * self.view.ndim # <<<<<<<<<<<<<<
 *
 * return tuple([suboffset for suboffset in self.view.suboffsets[:self.view.ndim]])
 */
__pyx_tuple__20 = PyTuple_New(1); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(2, 577, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__20);
__Pyx_INCREF(__pyx_int_neg_1);
__Pyx_GIVEREF(__pyx_int_neg_1);
PyTuple_SET_ITEM(__pyx_tuple__20, 0, __pyx_int_neg_1);
__Pyx_GIVEREF(__pyx_tuple__20);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
__pyx_tuple__21 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__21);
__Pyx_GIVEREF(__pyx_tuple__21);
/* "(tree fragment)":4
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
__pyx_tuple__22 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__22);
__Pyx_GIVEREF(__pyx_tuple__22);
/* "View.MemoryView":682
 * if item is Ellipsis:
 * if not seen_ellipsis:
 * result.extend([slice(None)] * (ndim - len(tup) + 1)) # <<<<<<<<<<<<<<
 * seen_ellipsis = True
 * else:
 */
__pyx_slice__23 = PySlice_New(Py_None, Py_None, Py_None); if (unlikely(!__pyx_slice__23)) __PYX_ERR(2, 682, __pyx_L1_error)
__Pyx_GOTREF(__pyx_slice__23);
__Pyx_GIVEREF(__pyx_slice__23);
/* "View.MemoryView":703
 * for suboffset in suboffsets[:ndim]:
 * if suboffset >= 0:
 * raise ValueError("Indirect dimensions not supported") # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_tuple__24 = PyTuple_Pack(1, __pyx_kp_s_Indirect_dimensions_not_supporte); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(2, 703, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__24);
__Pyx_GIVEREF(__pyx_tuple__24);
/* "(tree fragment)":2
 * def __reduce_cython__(self):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 */
__pyx_tuple__25 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(2, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__25);
__Pyx_GIVEREF(__pyx_tuple__25);
/* "(tree fragment)":4
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__")
 * def __setstate_cython__(self, __pyx_state):
 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
 */
__pyx_tuple__26 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(2, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__26);
__Pyx_GIVEREF(__pyx_tuple__26);
/* "View.MemoryView":286
 * return self.name
 *
 * cdef generic = Enum("<strided and direct or indirect>") # <<<<<<<<<<<<<<
 * cdef strided = Enum("<strided and direct>") # default
 * cdef indirect = Enum("<strided and indirect>")
 */
__pyx_tuple__27 = PyTuple_Pack(1, __pyx_kp_s_strided_and_direct_or_indirect); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(2, 286, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__27);
__Pyx_GIVEREF(__pyx_tuple__27);
/* "View.MemoryView":287
 *
 * cdef generic = Enum("<strided and direct or indirect>")
 * cdef strided = Enum("<strided and direct>") # default # <<<<<<<<<<<<<<
 * cdef indirect = Enum("<strided and indirect>")
 *
 */
__pyx_tuple__28 = PyTuple_Pack(1, __pyx_kp_s_strided_and_direct); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(2, 287, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__28);
__Pyx_GIVEREF(__pyx_tuple__28);
/* "View.MemoryView":288
 * cdef generic = Enum("<strided and direct or indirect>")
 * cdef strided = Enum("<strided and direct>") # default
 * cdef indirect = Enum("<strided and indirect>") # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_tuple__29 = PyTuple_Pack(1, __pyx_kp_s_strided_and_indirect); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(2, 288, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__29);
__Pyx_GIVEREF(__pyx_tuple__29);
/* "View.MemoryView":291
 *
 *
 * cdef contiguous = Enum("<contiguous and direct>") # <<<<<<<<<<<<<<
 * cdef indirect_contiguous = Enum("<contiguous and indirect>")
 *
 */
__pyx_tuple__30 = PyTuple_Pack(1, __pyx_kp_s_contiguous_and_direct); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(2, 291, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__30);
__Pyx_GIVEREF(__pyx_tuple__30);
/* "View.MemoryView":292
 *
 * cdef contiguous = Enum("<contiguous and direct>")
 * cdef indirect_contiguous = Enum("<contiguous and indirect>") # <<<<<<<<<<<<<<
 *
 *
 */
__pyx_tuple__31 = PyTuple_Pack(1, __pyx_kp_s_contiguous_and_indirect); if (unlikely(!__pyx_tuple__31)) __PYX_ERR(2, 292, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__31);
__Pyx_GIVEREF(__pyx_tuple__31);
/* "(tree fragment)":1
 * def __pyx_unpickle_Enum(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 */
__pyx_tuple__32 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(2, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__32);
__Pyx_GIVEREF(__pyx_tuple__32);
__pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_Enum, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(2, 1, __pyx_L1_error)
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
__Pyx_RefNannyFinishContext();
return -1;
}
/* Initialize module-level globals: intern all entries of the string table
 * above, then create the cached small-int constants (0, 1, -1, and the
 * pickle checksum 184977713). Returns 0 on success, -1 on failure with a
 * Python exception set. */
static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
__pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_int_184977713 = PyInt_FromLong(184977713L); if (unlikely(!__pyx_int_184977713)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
}
/* Forward declarations for the per-phase module-init helpers defined
 * below; they are invoked in sequence during module execution. */
static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
/* Module-init phase: seed the five memoryview Enum globals (generic,
 * strided, indirect, contiguous, indirect_contiguous) with Py_None so
 * they are never NULL; the real Enum objects are assigned later during
 * module execution. Always returns 0. */
static int __Pyx_modinit_global_init_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
/*--- Global init code ---*/
generic = Py_None; Py_INCREF(Py_None);
strided = Py_None; Py_INCREF(Py_None);
indirect = Py_None; Py_INCREF(Py_None);
contiguous = Py_None; Py_INCREF(Py_None);
indirect_contiguous = Py_None; Py_INCREF(Py_None);
__Pyx_RefNannyFinishContext();
return 0;
}
/* Module-init phase: variable exports. This module exports no C-level
 * variables, so the body is empty apart from RefNanny bookkeeping. */
static int __Pyx_modinit_variable_export_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
/*--- Variable export code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
/* Module-init phase: function exports. This module exports no C-level
 * functions to other Cython modules, so the body is empty apart from
 * RefNanny bookkeeping. */
static int __Pyx_modinit_function_export_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
/*--- Function export code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
/* Module-init phase: finalize the extension types defined by this module
 * (array, MemviewEnum, memoryview, _memoryviewslice): wire up their C-level
 * vtables, run PyType_Ready, install fast generic getattr where safe, and
 * register __reduce__/pickle support.  Returns 0 on success, -1 on error. */
static int __Pyx_modinit_type_init_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
  /*--- Type init code ---*/
  __pyx_vtabptr_array = &__pyx_vtable_array;
  __pyx_vtable_array.get_memview = (PyObject *(*)(struct __pyx_array_obj *))__pyx_array_get_memview;
  if (PyType_Ready(&__pyx_type___pyx_array) < 0) __PYX_ERR(2, 105, __pyx_L1_error)
  /* tp_print was removed in 3.8; zero it on older interpreters */
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type___pyx_array.tp_print = 0;
  #endif
  if (__Pyx_SetVtable(__pyx_type___pyx_array.tp_dict, __pyx_vtabptr_array) < 0) __PYX_ERR(2, 105, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type___pyx_array) < 0) __PYX_ERR(2, 105, __pyx_L1_error)
  __pyx_array_type = &__pyx_type___pyx_array;
  if (PyType_Ready(&__pyx_type___pyx_MemviewEnum) < 0) __PYX_ERR(2, 279, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type___pyx_MemviewEnum.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_MemviewEnum.tp_dictoffset && __pyx_type___pyx_MemviewEnum.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type___pyx_MemviewEnum.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type___pyx_MemviewEnum) < 0) __PYX_ERR(2, 279, __pyx_L1_error)
  __pyx_MemviewEnum_type = &__pyx_type___pyx_MemviewEnum;
  __pyx_vtabptr_memoryview = &__pyx_vtable_memoryview;
  __pyx_vtable_memoryview.get_item_pointer = (char *(*)(struct __pyx_memoryview_obj *, PyObject *))__pyx_memoryview_get_item_pointer;
  __pyx_vtable_memoryview.is_slice = (PyObject *(*)(struct __pyx_memoryview_obj *, PyObject *))__pyx_memoryview_is_slice;
  __pyx_vtable_memoryview.setitem_slice_assignment = (PyObject *(*)(struct __pyx_memoryview_obj *, PyObject *, PyObject *))__pyx_memoryview_setitem_slice_assignment;
  __pyx_vtable_memoryview.setitem_slice_assign_scalar = (PyObject *(*)(struct __pyx_memoryview_obj *, struct __pyx_memoryview_obj *, PyObject *))__pyx_memoryview_setitem_slice_assign_scalar;
  __pyx_vtable_memoryview.setitem_indexed = (PyObject *(*)(struct __pyx_memoryview_obj *, PyObject *, PyObject *))__pyx_memoryview_setitem_indexed;
  __pyx_vtable_memoryview.convert_item_to_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *))__pyx_memoryview_convert_item_to_object;
  __pyx_vtable_memoryview.assign_item_from_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *, PyObject *))__pyx_memoryview_assign_item_from_object;
  if (PyType_Ready(&__pyx_type___pyx_memoryview) < 0) __PYX_ERR(2, 330, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type___pyx_memoryview.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_memoryview.tp_dictoffset && __pyx_type___pyx_memoryview.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type___pyx_memoryview.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_SetVtable(__pyx_type___pyx_memoryview.tp_dict, __pyx_vtabptr_memoryview) < 0) __PYX_ERR(2, 330, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type___pyx_memoryview) < 0) __PYX_ERR(2, 330, __pyx_L1_error)
  __pyx_memoryview_type = &__pyx_type___pyx_memoryview;
  /* _memoryviewslice inherits the memoryview vtable, overriding the two
   * item-conversion slots, and uses memoryview as tp_base */
  __pyx_vtabptr__memoryviewslice = &__pyx_vtable__memoryviewslice;
  __pyx_vtable__memoryviewslice.__pyx_base = *__pyx_vtabptr_memoryview;
  __pyx_vtable__memoryviewslice.__pyx_base.convert_item_to_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *))__pyx_memoryviewslice_convert_item_to_object;
  __pyx_vtable__memoryviewslice.__pyx_base.assign_item_from_object = (PyObject *(*)(struct __pyx_memoryview_obj *, char *, PyObject *))__pyx_memoryviewslice_assign_item_from_object;
  __pyx_type___pyx_memoryviewslice.tp_base = __pyx_memoryview_type;
  if (PyType_Ready(&__pyx_type___pyx_memoryviewslice) < 0) __PYX_ERR(2, 965, __pyx_L1_error)
  #if PY_VERSION_HEX < 0x030800B1
  __pyx_type___pyx_memoryviewslice.tp_print = 0;
  #endif
  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type___pyx_memoryviewslice.tp_dictoffset && __pyx_type___pyx_memoryviewslice.tp_getattro == PyObject_GenericGetAttr)) {
    __pyx_type___pyx_memoryviewslice.tp_getattro = __Pyx_PyObject_GenericGetAttr;
  }
  if (__Pyx_SetVtable(__pyx_type___pyx_memoryviewslice.tp_dict, __pyx_vtabptr__memoryviewslice) < 0) __PYX_ERR(2, 965, __pyx_L1_error)
  if (__Pyx_setup_reduce((PyObject*)&__pyx_type___pyx_memoryviewslice) < 0) __PYX_ERR(2, 965, __pyx_L1_error)
  __pyx_memoryviewslice_type = &__pyx_type___pyx_memoryviewslice;
  __Pyx_RefNannyFinishContext();
  return 0;
  __pyx_L1_error:;
  __Pyx_RefNannyFinishContext();
  return -1;
}
/* Module-init phase: import C-level type objects used by the generated code
 * -- builtins.type plus numpy's dtype/flatiter/broadcast/ndarray/ufunc --
 * with struct-size compatibility checks (Warn or Ignore per type).
 * Returns 0 on success, -1 on error. */
static int __Pyx_modinit_type_import_code(void) {
  __Pyx_RefNannyDeclarations
  PyObject *__pyx_t_1 = NULL;
  __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
  /*--- Type import code ---*/
  __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 9, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type",
  #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000
  sizeof(PyTypeObject),
  #else
  sizeof(PyHeapTypeObject),
  #endif
  __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __pyx_t_1 = PyImport_ImportModule("numpy"); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 206, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __pyx_ptype_5numpy_dtype = __Pyx_ImportType(__pyx_t_1, "numpy", "dtype", sizeof(PyArray_Descr), __Pyx_ImportType_CheckSize_Ignore);
   if (!__pyx_ptype_5numpy_dtype) __PYX_ERR(1, 206, __pyx_L1_error)
  __pyx_ptype_5numpy_flatiter = __Pyx_ImportType(__pyx_t_1, "numpy", "flatiter", sizeof(PyArrayIterObject), __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_5numpy_flatiter) __PYX_ERR(1, 229, __pyx_L1_error)
  __pyx_ptype_5numpy_broadcast = __Pyx_ImportType(__pyx_t_1, "numpy", "broadcast", sizeof(PyArrayMultiIterObject), __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_5numpy_broadcast) __PYX_ERR(1, 233, __pyx_L1_error)
  __pyx_ptype_5numpy_ndarray = __Pyx_ImportType(__pyx_t_1, "numpy", "ndarray", sizeof(PyArrayObject), __Pyx_ImportType_CheckSize_Ignore);
   if (!__pyx_ptype_5numpy_ndarray) __PYX_ERR(1, 242, __pyx_L1_error)
  __pyx_ptype_5numpy_ufunc = __Pyx_ImportType(__pyx_t_1, "numpy", "ufunc", sizeof(PyUFuncObject), __Pyx_ImportType_CheckSize_Warn);
   if (!__pyx_ptype_5numpy_ufunc) __PYX_ERR(1, 918, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  __Pyx_RefNannyFinishContext();
  return 0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_RefNannyFinishContext();
  return -1;
}
/* Module-init phase: import C variables from other modules (none here). */
static int __Pyx_modinit_variable_import_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
  /*--- Variable import code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Module-init phase: import C functions from other modules (none here). */
static int __Pyx_modinit_function_import_code(void) {
  __Pyx_RefNannyDeclarations
  __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
  /*--- Function import code ---*/
  __Pyx_RefNannyFinishContext();
  return 0;
}
/* Select the return type/linkage of the module init entry point:
 * void on Python 2, PyObject* on Python 3; PyMODINIT_FUNC (which carries the
 * DLL-export attribute) unless CYTHON_NO_PYINIT_EXPORT is defined. */
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#endif
#endif
/* Module entry point: init_anomaly_kernel_cython (Py2) /
 * PyInit__anomaly_kernel_cython (Py3).  Under PEP 489 multi-phase init the
 * Py3 entry point only hands back the module definition; the real work runs
 * in __pyx_pymod_exec__anomaly_kernel_cython below.  Otherwise the shared
 * function body that follows the #endif pair is the init function itself. */
#if PY_MAJOR_VERSION < 3
__Pyx_PyMODINIT_FUNC init_anomaly_kernel_cython(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC init_anomaly_kernel_cython(void)
#else
__Pyx_PyMODINIT_FUNC PyInit__anomaly_kernel_cython(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC PyInit__anomaly_kernel_cython(void)
#if CYTHON_PEP489_MULTI_PHASE_INIT
{
  return PyModuleDef_Init(&__pyx_moduledef);
}
/* Refuse to load this module into more than one interpreter per process:
 * remembers the first interpreter (by ID on 3.7+, by pointer otherwise) and
 * raises ImportError on any later, different one.  Returns 0 if OK, -1 not. */
static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
  #if PY_VERSION_HEX >= 0x030700A1
  static PY_INT64_T main_interpreter_id = -1;
  PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
  if (main_interpreter_id == -1) {
    main_interpreter_id = current_id;
    return (unlikely(current_id == -1)) ? -1 : 0;
  } else if (unlikely(main_interpreter_id != current_id))
  #else
  static PyInterpreterState *main_interpreter = NULL;
  PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
  if (!main_interpreter) {
    main_interpreter = current_interpreter;
  } else if (unlikely(main_interpreter != current_interpreter))
  #endif
  {
    PyErr_SetString(
      PyExc_ImportError,
      "Interpreter change detected - this module can only be loaded into one interpreter per process.");
    return -1;
  }
  return 0;
}
/* Copy one attribute of the import spec (e.g. "origin") into the new module
 * dict under to_name.  A missing attribute is silently ignored; Py_None is
 * skipped unless allow_none.  Returns 0 on success, -1 on error. */
static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
  PyObject *value = PyObject_GetAttrString(spec, from_name);
  int result = 0;
  if (likely(value)) {
    if (allow_none || value != Py_None) {
      result = PyDict_SetItemString(moddict, to_name, value);
    }
    Py_DECREF(value);
  } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
    PyErr_Clear();
  } else {
    result = -1;
  }
  return result;
}
/* PEP 489 "create" slot: build the module object from the import spec and
 * seed __loader__/__file__/__package__/__path__ from the spec's attributes.
 * Returns the existing module if already initialized; NULL on error. */
static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
  PyObject *module = NULL, *moddict, *modname;
  if (__Pyx_check_single_interpreter())
    return NULL;
  if (__pyx_m)
    return __Pyx_NewRef(__pyx_m);
  modname = PyObject_GetAttrString(spec, "name");
  if (unlikely(!modname)) goto bad;
  module = PyModule_NewObject(modname);
  Py_DECREF(modname);
  if (unlikely(!module)) goto bad;
  moddict = PyModule_GetDict(module);
  if (unlikely(!moddict)) goto bad;
  if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
  if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
  if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
  if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
  return module;
bad:
  Py_XDECREF(module);
  return NULL;
}
/* PEP 489 "exec" slot -- and, via the preceding preprocessor branches, also
 * the body of the classic single-phase init entry point.  Creates or adopts
 * the module object, registers builtins and cached constants, runs the
 * modinit phases above (globals, exports, type init, type imports), then
 * executes the module-level statements of _anomaly_kernel_cython.pyx
 * (the numpy/array/bisect imports) and the View.MemoryView support code
 * (Enum singletons, preallocated thread locks, getbuffer capsules,
 * __pyx_unpickle_Enum). */
static CYTHON_SMALL_CODE int __pyx_pymod_exec__anomaly_kernel_cython(PyObject *__pyx_pyinit_module)
#endif
#endif
{
  PyObject *__pyx_t_1 = NULL;
  static PyThread_type_lock __pyx_t_2[8];
  __Pyx_RefNannyDeclarations
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  if (__pyx_m) {
    if (__pyx_m == __pyx_pyinit_module) return 0;
    PyErr_SetString(PyExc_RuntimeError, "Module '_anomaly_kernel_cython' has already been imported. Re-initialisation is not supported.");
    return -1;
  }
  #elif PY_MAJOR_VERSION >= 3
  if (__pyx_m) return __Pyx_NewRef(__pyx_m);
  #endif
  #if CYTHON_REFNANNY
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
if (!__Pyx_RefNanny) {
  PyErr_Clear();
  __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
  if (!__Pyx_RefNanny)
      Py_FatalError("failed to import 'refnanny' module");
}
#endif
  __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit__anomaly_kernel_cython(void)", 0);
  if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #ifdef __Pxy_PyFrame_Initialize_Offsets
  __Pxy_PyFrame_Initialize_Offsets();
  #endif
  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
  __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
  __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
  #ifdef __Pyx_CyFunction_USED
  if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_FusedFunction_USED
  if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_Coroutine_USED
  if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_Generator_USED
  if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_AsyncGen_USED
  if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  #ifdef __Pyx_StopAsyncIteration_USED
  if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  /*--- Library function declarations ---*/
  /*--- Threads initialization code ---*/
  #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
  #ifdef WITH_THREAD /* Python build with threading support? */
  PyEval_InitThreads();
  #endif
  #endif
  /*--- Module creation code ---*/
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  __pyx_m = __pyx_pyinit_module;
  Py_INCREF(__pyx_m);
  #else
  #if PY_MAJOR_VERSION < 3
  __pyx_m = Py_InitModule4("_anomaly_kernel_cython", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
  #else
  __pyx_m = PyModule_Create(&__pyx_moduledef);
  #endif
  if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_d);
  __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_b);
  __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
  Py_INCREF(__pyx_cython_runtime);
  if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
  /*--- Initialize various global constants etc. ---*/
  if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
  if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif
  if (__pyx_module_is_main_msanomalydetector___anomaly_kernel_cython) {
    if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name_2, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  }
  #if PY_MAJOR_VERSION >= 3
  {
    PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
    if (!PyDict_GetItemString(modules, "msanomalydetector._anomaly_kernel_cython")) {
      if (unlikely(PyDict_SetItemString(modules, "msanomalydetector._anomaly_kernel_cython", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
    }
  }
  #endif
  /*--- Builtin init code ---*/
  if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
  /*--- Constants init code ---*/
  if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
  /*--- Global type/function init code ---*/
  (void)__Pyx_modinit_global_init_code();
  (void)__Pyx_modinit_variable_export_code();
  (void)__Pyx_modinit_function_export_code();
  if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;
  if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error;
  (void)__Pyx_modinit_variable_import_code();
  (void)__Pyx_modinit_function_import_code();
  /*--- Execution code ---*/
  #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
  if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  #endif

  /* "msanomalydetector/_anomaly_kernel_cython.pyx":1
 * import numpy as np             # <<<<<<<<<<<<<<
 * cimport numpy as np
 * import array
 */
  __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;

  /* "msanomalydetector/_anomaly_kernel_cython.pyx":3
 * import numpy as np
 * cimport numpy as np
 * import array             # <<<<<<<<<<<<<<
 * import bisect
 * 
 */
  __pyx_t_1 = __Pyx_Import(__pyx_n_s_array, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_array, __pyx_t_1) < 0) __PYX_ERR(0, 3, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;

  /* "msanomalydetector/_anomaly_kernel_cython.pyx":4
 * cimport numpy as np
 * import array
 * import bisect             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_t_1 = __Pyx_Import(__pyx_n_s_bisect, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_bisect, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;

  /* "msanomalydetector/_anomaly_kernel_cython.pyx":1
 * import numpy as np             # <<<<<<<<<<<<<<
 * cimport numpy as np
 * import array
 */
  __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;

  /* "View.MemoryView":209
 *         info.obj = self
 * 
 *     __pyx_getbuffer = capsule(<void *> &__pyx_array_getbuffer, "getbuffer(obj, view, flags)")             # <<<<<<<<<<<<<<
 * 
 *     def __dealloc__(array self):
 */
  __pyx_t_1 = __pyx_capsule_create(((void *)(&__pyx_array_getbuffer)), ((char *)"getbuffer(obj, view, flags)")); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 209, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem((PyObject *)__pyx_array_type->tp_dict, __pyx_n_s_pyx_getbuffer, __pyx_t_1) < 0) __PYX_ERR(2, 209, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  PyType_Modified(__pyx_array_type);

  /* "View.MemoryView":286
 *         return self.name
 * 
 * cdef generic = Enum("<strided and direct or indirect>")             # <<<<<<<<<<<<<<
 * cdef strided = Enum("<strided and direct>") # default
 * cdef indirect = Enum("<strided and indirect>")
 */
  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_MemviewEnum_type), __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 286, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_XGOTREF(generic);
  __Pyx_DECREF_SET(generic, __pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __pyx_t_1 = 0;

  /* "View.MemoryView":287
 * 
 * cdef generic = Enum("<strided and direct or indirect>")
 * cdef strided = Enum("<strided and direct>") # default             # <<<<<<<<<<<<<<
 * cdef indirect = Enum("<strided and indirect>")
 * 
 */
  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_MemviewEnum_type), __pyx_tuple__28, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 287, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_XGOTREF(strided);
  __Pyx_DECREF_SET(strided, __pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __pyx_t_1 = 0;

  /* "View.MemoryView":288
 * cdef generic = Enum("<strided and direct or indirect>")
 * cdef strided = Enum("<strided and direct>") # default
 * cdef indirect = Enum("<strided and indirect>")             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_MemviewEnum_type), __pyx_tuple__29, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 288, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_XGOTREF(indirect);
  __Pyx_DECREF_SET(indirect, __pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __pyx_t_1 = 0;

  /* "View.MemoryView":291
 * 
 * 
 * cdef contiguous = Enum("<contiguous and direct>")             # <<<<<<<<<<<<<<
 * cdef indirect_contiguous = Enum("<contiguous and indirect>")
 * 
 */
  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_MemviewEnum_type), __pyx_tuple__30, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 291, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_XGOTREF(contiguous);
  __Pyx_DECREF_SET(contiguous, __pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __pyx_t_1 = 0;

  /* "View.MemoryView":292
 * 
 * cdef contiguous = Enum("<contiguous and direct>")
 * cdef indirect_contiguous = Enum("<contiguous and indirect>")             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_MemviewEnum_type), __pyx_tuple__31, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 292, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  __Pyx_XGOTREF(indirect_contiguous);
  __Pyx_DECREF_SET(indirect_contiguous, __pyx_t_1);
  __Pyx_GIVEREF(__pyx_t_1);
  __pyx_t_1 = 0;

  /* "View.MemoryView":316
 * 
 * DEF THREAD_LOCKS_PREALLOCATED = 8
 * cdef int __pyx_memoryview_thread_locks_used = 0             # <<<<<<<<<<<<<<
 * cdef PyThread_type_lock[THREAD_LOCKS_PREALLOCATED] __pyx_memoryview_thread_locks = [
 *     PyThread_allocate_lock(),
 */
  __pyx_memoryview_thread_locks_used = 0;

  /* "View.MemoryView":317
 * DEF THREAD_LOCKS_PREALLOCATED = 8
 * cdef int __pyx_memoryview_thread_locks_used = 0
 * cdef PyThread_type_lock[THREAD_LOCKS_PREALLOCATED] __pyx_memoryview_thread_locks = [             # <<<<<<<<<<<<<<
 *     PyThread_allocate_lock(),
 *     PyThread_allocate_lock(),
 */
  __pyx_t_2[0] = PyThread_allocate_lock();
  __pyx_t_2[1] = PyThread_allocate_lock();
  __pyx_t_2[2] = PyThread_allocate_lock();
  __pyx_t_2[3] = PyThread_allocate_lock();
  __pyx_t_2[4] = PyThread_allocate_lock();
  __pyx_t_2[5] = PyThread_allocate_lock();
  __pyx_t_2[6] = PyThread_allocate_lock();
  __pyx_t_2[7] = PyThread_allocate_lock();
  memcpy(&(__pyx_memoryview_thread_locks[0]), __pyx_t_2, sizeof(__pyx_memoryview_thread_locks[0]) * (8));

  /* "View.MemoryView":549
 *         info.obj = self
 * 
 *     __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_t_1 = __pyx_capsule_create(((void *)(&__pyx_memoryview_getbuffer)), ((char *)"getbuffer(obj, view, flags)")); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 549, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem((PyObject *)__pyx_memoryview_type->tp_dict, __pyx_n_s_pyx_getbuffer, __pyx_t_1) < 0) __PYX_ERR(2, 549, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  PyType_Modified(__pyx_memoryview_type);

  /* "View.MemoryView":995
 *         return self.from_object
 * 
 *     __pyx_getbuffer = capsule(<void *> &__pyx_memoryview_getbuffer, "getbuffer(obj, view, flags)")             # <<<<<<<<<<<<<<
 * 
 * 
 */
  __pyx_t_1 = __pyx_capsule_create(((void *)(&__pyx_memoryview_getbuffer)), ((char *)"getbuffer(obj, view, flags)")); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 995, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem((PyObject *)__pyx_memoryviewslice_type->tp_dict, __pyx_n_s_pyx_getbuffer, __pyx_t_1) < 0) __PYX_ERR(2, 995, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
  PyType_Modified(__pyx_memoryviewslice_type);

  /* "(tree fragment)":1
 * def __pyx_unpickle_Enum(__pyx_type, long __pyx_checksum, __pyx_state):             # <<<<<<<<<<<<<<
 *     cdef object __pyx_PickleError
 *     cdef object __pyx_result
 */
  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_15View_dot_MemoryView_1__pyx_unpickle_Enum, NULL, __pyx_n_s_View_MemoryView); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 1, __pyx_L1_error)
  __Pyx_GOTREF(__pyx_t_1);
  if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_Enum, __pyx_t_1) < 0) __PYX_ERR(2, 1, __pyx_L1_error)
  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;

  /* "(tree fragment)":11
 *         __pyx_unpickle_Enum__set_state(<Enum> __pyx_result, __pyx_state)
 *     return __pyx_result
 * cdef __pyx_unpickle_Enum__set_state(Enum __pyx_result, tuple __pyx_state):             # <<<<<<<<<<<<<<
 *     __pyx_result.name = __pyx_state[0]
 *     if len(__pyx_state) > 1 and hasattr(__pyx_result, '__dict__'):
 */

  /*--- Wrapped vars code ---*/

  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  if (__pyx_m) {
    if (__pyx_d) {
      __Pyx_AddTraceback("init msanomalydetector._anomaly_kernel_cython", __pyx_clineno, __pyx_lineno, __pyx_filename);
    }
    Py_CLEAR(__pyx_m);
  } else if (!PyErr_Occurred()) {
    PyErr_SetString(PyExc_ImportError, "init msanomalydetector._anomaly_kernel_cython");
  }
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  return (__pyx_m != NULL) ? 0 : -1;
  #elif PY_MAJOR_VERSION >= 3
  return __pyx_m;
  #else
  return;
  #endif
}
/* --- Runtime support code --- */
/* Refnanny */
#if CYTHON_REFNANNY
/* Import the RefNannyAPI struct pointer (stored as a Python int) from the
 * named refnanny debugging module; returns NULL on any failure. */
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
    PyObject *m = NULL, *p = NULL;
    void *r = NULL;
    m = PyImport_ImportModule(modname);
    if (!m) goto end;
    p = PyObject_GetAttrString(m, "RefNannyAPI");
    if (!p) goto end;
    r = PyLong_AsVoidPtr(p);
end:
    Py_XDECREF(p);
    Py_XDECREF(m);
    return (__Pyx_RefNannyAPIStruct *)r;
}
#endif
/* PyObjectGetAttrStr */
#if CYTHON_USE_TYPE_SLOTS
/* Fast attribute lookup: call the type's tp_getattro (or legacy tp_getattr
 * on Py2) slot directly, falling back to the generic PyObject_GetAttr. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
    PyTypeObject* tp = Py_TYPE(obj);
    if (likely(tp->tp_getattro))
        return tp->tp_getattro(obj, attr_name);
#if PY_MAJOR_VERSION < 3
    if (likely(tp->tp_getattr))
        return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
#endif
    return PyObject_GetAttr(obj, attr_name);
}
#endif
/* GetBuiltinName */
/* Look up `name` on the builtins module; sets NameError and returns NULL if
 * the name does not exist. */
static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
    PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
    if (unlikely(!result)) {
        PyErr_Format(PyExc_NameError,
#if PY_MAJOR_VERSION >= 3
            "name '%U' is not defined", name);
#else
            "name '%.200s' is not defined", PyString_AS_STRING(name));
#endif
    }
    return result;
}
/* PyObjectCall */
#if CYTHON_COMPILING_IN_CPYTHON
/* Call `func` through its tp_call slot with recursion-depth checking.
 * Guarantees an exception is set whenever NULL is returned (guards against
 * misbehaving tp_call implementations). */
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
    PyObject *result;
    ternaryfunc call = func->ob_type->tp_call;
    if (unlikely(!call))
        return PyObject_Call(func, arg, kw);
    if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
        return NULL;
    result = (*call)(func, arg, kw);
    Py_LeaveRecursiveCall();
    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
        PyErr_SetString(
            PyExc_SystemError,
            "NULL result without error in PyObject_Call");
    }
    return result;
}
#endif
/* PyErrFetchRestore */
#if CYTHON_FAST_THREAD_STATE
/* Fast-path equivalents of PyErr_Restore / PyErr_Fetch that poke the
 * curexc_* fields of a given thread state directly.  Restore steals the
 * references to (type, value, tb); Fetch transfers ownership to the caller. */
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    tmp_type = tstate->curexc_type;
    tmp_value = tstate->curexc_value;
    tmp_tb = tstate->curexc_traceback;
    tstate->curexc_type = type;
    tstate->curexc_value = value;
    tstate->curexc_traceback = tb;
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
    *type = tstate->curexc_type;
    *value = tstate->curexc_value;
    *tb = tstate->curexc_traceback;
    tstate->curexc_type = 0;
    tstate->curexc_value = 0;
    tstate->curexc_traceback = 0;
}
#endif
/* RaiseException */
#if PY_MAJOR_VERSION < 3
/* Python 2 implementation of the `raise` statement: validates the optional
 * traceback, normalizes (type, value) -- instantiating or type-checking as
 * required -- and installs the triple as the current exception.  `cause` is
 * unused on Py2. */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb,
                        CYTHON_UNUSED PyObject *cause) {
    __Pyx_PyThreadState_declare
    Py_XINCREF(type);
    if (!value || value == Py_None)
        value = NULL;
    else
        Py_INCREF(value);
    if (!tb || tb == Py_None)
        tb = NULL;
    else {
        Py_INCREF(tb);
        if (!PyTraceBack_Check(tb)) {
            PyErr_SetString(PyExc_TypeError,
                "raise: arg 3 must be a traceback or None");
            goto raise_error;
        }
    }
    if (PyType_Check(type)) {
#if CYTHON_COMPILING_IN_PYPY
        if (!value) {
            Py_INCREF(Py_None);
            value = Py_None;
        }
#endif
        PyErr_NormalizeException(&type, &value, &tb);
    } else {
        if (value) {
            PyErr_SetString(PyExc_TypeError,
                "instance exception may not have a separate value");
            goto raise_error;
        }
        value = type;
        type = (PyObject*) Py_TYPE(type);
        Py_INCREF(type);
        if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
            PyErr_SetString(PyExc_TypeError,
                "raise: exception class must be a subclass of BaseException");
            goto raise_error;
        }
    }
    __Pyx_PyThreadState_assign
    __Pyx_ErrRestore(type, value, tb);
    return;
raise_error:
    Py_XDECREF(value);
    Py_XDECREF(type);
    Py_XDECREF(tb);
    return;
}
#else
/* Python 3 implementation of `raise ... from ...`: instantiates the
 * exception when a class was given, validates/attaches the explicit cause
 * and traceback, then sets the exception via PyErr_SetObject. */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
    PyObject* owned_instance = NULL;
    if (tb == Py_None) {
        tb = 0;
    } else if (tb && !PyTraceBack_Check(tb)) {
        PyErr_SetString(PyExc_TypeError,
            "raise: arg 3 must be a traceback or None");
        goto bad;
    }
    if (value == Py_None)
        value = 0;
    if (PyExceptionInstance_Check(type)) {
        if (value) {
            PyErr_SetString(PyExc_TypeError,
                "instance exception may not have a separate value");
            goto bad;
        }
        value = type;
        type = (PyObject*) Py_TYPE(value);
    } else if (PyExceptionClass_Check(type)) {
        PyObject *instance_class = NULL;
        if (value && PyExceptionInstance_Check(value)) {
            instance_class = (PyObject*) Py_TYPE(value);
            if (instance_class != type) {
                int is_subclass = PyObject_IsSubclass(instance_class, type);
                if (!is_subclass) {
                    instance_class = NULL;
                } else if (unlikely(is_subclass == -1)) {
                    goto bad;
                } else {
                    type = instance_class;
                }
            }
        }
        if (!instance_class) {
            PyObject *args;
            if (!value)
                args = PyTuple_New(0);
            else if (PyTuple_Check(value)) {
                Py_INCREF(value);
                args = value;
            } else
                args = PyTuple_Pack(1, value);
            if (!args)
                goto bad;
            owned_instance = PyObject_Call(type, args, NULL);
            Py_DECREF(args);
            if (!owned_instance)
                goto bad;
            value = owned_instance;
            if (!PyExceptionInstance_Check(value)) {
                PyErr_Format(PyExc_TypeError,
                             "calling %R should have returned an instance of "
                             "BaseException, not %R",
                             type, Py_TYPE(value));
                goto bad;
            }
        }
    } else {
        PyErr_SetString(PyExc_TypeError,
            "raise: exception class must be a subclass of BaseException");
        goto bad;
    }
    if (cause) {
        PyObject *fixed_cause;
        if (cause == Py_None) {
            fixed_cause = NULL;
        } else if (PyExceptionClass_Check(cause)) {
            fixed_cause = PyObject_CallObject(cause, NULL);
            if (fixed_cause == NULL)
                goto bad;
        } else if (PyExceptionInstance_Check(cause)) {
            fixed_cause = cause;
            Py_INCREF(fixed_cause);
        } else {
            PyErr_SetString(PyExc_TypeError,
                            "exception causes must derive from "
                            "BaseException");
            goto bad;
        }
        PyException_SetCause(value, fixed_cause);
    }
    PyErr_SetObject(type, value);
    if (tb) {
#if CYTHON_COMPILING_IN_PYPY
        PyObject *tmp_type, *tmp_value, *tmp_tb;
        PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb);
        Py_INCREF(tb);
        PyErr_Restore(tmp_type, tmp_value, tb);
        Py_XDECREF(tmp_tb);
#else
        PyThreadState *tstate = __Pyx_PyThreadState_Current;
        PyObject* tmp_tb = tstate->curexc_traceback;
        if (tb != tmp_tb) {
            Py_INCREF(tb);
            tstate->curexc_traceback = tb;
            Py_XDECREF(tmp_tb);
        }
#endif
    }
bad:
    Py_XDECREF(owned_instance);
    return;
}
#endif
/* None */
/* Python-style modulo for C longs: unlike C's %, the result always has the
 * same sign as the divisor b (matches CPython's semantics for a % b). */
static CYTHON_INLINE long __Pyx_mod_long(long a, long b) {
    long rem = a % b;
    /* C truncates toward zero; shift a nonzero remainder of opposite sign
     * to b back into [0, b) resp. (b, 0]. */
    if (rem != 0 && (rem ^ b) < 0)
        rem += b;
    return rem;
}
/* None */
/* Python-style floor division for C longs: rounds toward negative infinity,
 * unlike C's /, which truncates toward zero (matches CPython's a // b). */
static CYTHON_INLINE long __Pyx_div_long(long a, long b) {
    long quot = a / b;
    long rem = a - quot * b;
    /* A nonzero remainder with sign opposite to b means C rounded up;
     * step the quotient down to get the floor. */
    if (rem != 0 && (rem ^ b) < 0)
        quot -= 1;
    return quot;
}
/* BufferIndexError */
/* Raise IndexError for an out-of-bounds buffer access on the given axis. */
static void __Pyx_RaiseBufferIndexError(int axis) {
  PyErr_Format(PyExc_IndexError,
     "Out of bounds on buffer access (axis %d)", axis);
}
/* WriteUnraisableException */
/* Report an exception that cannot propagate (e.g. raised in a context that
 * returns void or holds no GIL): acquires the GIL if needed, preserves the
 * current error state, optionally prints the full traceback, then routes the
 * report through PyErr_WriteUnraisable with `name` as context. */
static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno,
                                  CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename,
                                  int full_traceback, CYTHON_UNUSED int nogil) {
    PyObject *old_exc, *old_val, *old_tb;
    PyObject *ctx;
    __Pyx_PyThreadState_declare
#ifdef WITH_THREAD
    PyGILState_STATE state;
    if (nogil)
        state = PyGILState_Ensure();
#ifdef _MSC_VER
    else state = (PyGILState_STATE)-1;
#endif
#endif
    __Pyx_PyThreadState_assign
    __Pyx_ErrFetch(&old_exc, &old_val, &old_tb);
    if (full_traceback) {
        Py_XINCREF(old_exc);
        Py_XINCREF(old_val);
        Py_XINCREF(old_tb);
        __Pyx_ErrRestore(old_exc, old_val, old_tb);
        PyErr_PrintEx(1);
    }
#if PY_MAJOR_VERSION < 3
    ctx = PyString_FromString(name);
#else
    ctx = PyUnicode_FromString(name);
#endif
    __Pyx_ErrRestore(old_exc, old_val, old_tb);
    if (!ctx) {
        PyErr_WriteUnraisable(Py_None);
    } else {
        PyErr_WriteUnraisable(ctx);
        Py_DECREF(ctx);
    }
#ifdef WITH_THREAD
    if (nogil)
        PyGILState_Release(state);
#endif
}
/* RaiseArgTupleInvalid */
/* Raise TypeError for a call with the wrong number of positional arguments:
 * "f() takes exactly/at least/at most N positional argument(s) (M given)". */
static void __Pyx_RaiseArgtupleInvalid(
    const char* func_name,
    int exact,
    Py_ssize_t num_min,
    Py_ssize_t num_max,
    Py_ssize_t num_found)
{
    int too_few = (num_found < num_min);
    /* report the nearest violated bound */
    Py_ssize_t num_expected = too_few ? num_min : num_max;
    const char *more_or_less;
    if (exact) {
        more_or_less = "exactly";
    } else if (too_few) {
        more_or_less = "at least";
    } else {
        more_or_less = "at most";
    }
    PyErr_Format(PyExc_TypeError,
                 "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
                 func_name, more_or_less, num_expected,
                 (num_expected == 1) ? "" : "s", num_found);
}
/* RaiseDoubleKeywords */
/* Raise TypeError for a keyword argument that was also passed positionally
 * (or given twice). */
static void __Pyx_RaiseDoubleKeywordsError(
    const char* func_name,
    PyObject* kw_name)
{
    PyErr_Format(PyExc_TypeError,
        #if PY_MAJOR_VERSION >= 3
        "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
        #else
        "%s() got multiple values for keyword argument '%s'", func_name,
        PyString_AsString(kw_name));
        #endif
}
/* ParseKeywords */
/* Match the keyword dict `kwds` against the expected argument names in
 * `argnames` (NULL-terminated; the first num_pos_args entries were already
 * filled positionally).  Recognized keywords are stored into values[];
 * unknown ones go into kwds2 if given (i.e. the function takes **kwargs),
 * otherwise raise TypeError.  A keyword that duplicates a positional
 * argument raises via __Pyx_RaiseDoubleKeywordsError.  The `**name != key`
 * comparisons are an identity fast path for interned strings before falling
 * back to a full string compare.  Returns 0 on success, -1 on error. */
static int __Pyx_ParseOptionalKeywords(
    PyObject *kwds,
    PyObject **argnames[],
    PyObject *kwds2,
    PyObject *values[],
    Py_ssize_t num_pos_args,
    const char* function_name)
{
    PyObject *key = 0, *value = 0;
    Py_ssize_t pos = 0;
    PyObject*** name;
    PyObject*** first_kw_arg = argnames + num_pos_args;
    while (PyDict_Next(kwds, &pos, &key, &value)) {
        name = first_kw_arg;
        while (*name && (**name != key)) name++;
        if (*name) {
            values[name-argnames] = value;
            continue;
        }
        name = first_kw_arg;
        #if PY_MAJOR_VERSION < 3
        if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
            while (*name) {
                if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
                        && _PyString_Eq(**name, key)) {
                    values[name-argnames] = value;
                    break;
                }
                name++;
            }
            if (*name) continue;
            else {
                PyObject*** argname = argnames;
                while (argname != first_kw_arg) {
                    if ((**argname == key) || (
                            (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
                             && _PyString_Eq(**argname, key))) {
                        goto arg_passed_twice;
                    }
                    argname++;
                }
            }
        } else
        #endif
        if (likely(PyUnicode_Check(key))) {
            while (*name) {
                int cmp = (**name == key) ? 0 :
                #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
                    (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
                #endif
                    PyUnicode_Compare(**name, key);
                if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
                if (cmp == 0) {
                    values[name-argnames] = value;
                    break;
                }
                name++;
            }
            if (*name) continue;
            else {
                PyObject*** argname = argnames;
                while (argname != first_kw_arg) {
                    int cmp = (**argname == key) ? 0 :
                    #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
                        (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
                    #endif
                        PyUnicode_Compare(**argname, key);
                    if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
                    if (cmp == 0) goto arg_passed_twice;
                    argname++;
                }
            }
        } else
            goto invalid_keyword_type;
        if (kwds2) {
            if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
        } else {
            goto invalid_keyword;
        }
    }
    return 0;
arg_passed_twice:
    __Pyx_RaiseDoubleKeywordsError(function_name, key);
    goto bad;
invalid_keyword_type:
    PyErr_Format(PyExc_TypeError,
        "%.200s() keywords must be strings", function_name);
    goto bad;
invalid_keyword:
    PyErr_Format(PyExc_TypeError,
    #if PY_MAJOR_VERSION < 3
        "%.200s() got an unexpected keyword argument '%.200s'",
        function_name, PyString_AsString(key));
    #else
        "%s() got an unexpected keyword argument '%U'",
        function_name, key);
    #endif
bad:
    return -1;
}
/* None */
/* Report use of a local variable before any value was assigned to it. */
static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) {
    const char *message = "local variable '%s' referenced before assignment";
    PyErr_Format(PyExc_UnboundLocalError, message, varname);
}
/* MemviewSliceInit */
/* Populate `memviewslice` (shape/strides/suboffsets/data) from the Py_buffer
 * held by `memview`.  Fails if the slice already holds a memoryview.  When
 * the buffer exposes no strides, strides are derived from itemsize and shape
 * walking from the last dimension backwards (contiguous layout).  The first
 * acquisition takes a new reference to `memview` unless the caller already
 * owns a fresh one (`memview_is_new_reference`).
 * Returns 0 on success, -1 with an exception set on failure. */
static int
__Pyx_init_memviewslice(struct __pyx_memoryview_obj *memview,
                        int ndim,
                        __Pyx_memviewslice *memviewslice,
                        int memview_is_new_reference)
{
    __Pyx_RefNannyDeclarations
    int i, retval=-1;
    Py_buffer *buf = &memview->view;
    __Pyx_RefNannySetupContext("init_memviewslice", 0);
    if (memviewslice->memview || memviewslice->data) {
        PyErr_SetString(PyExc_ValueError,
            "memviewslice is already initialized!");
        goto fail;
    }
    if (buf->strides) {
        for (i = 0; i < ndim; i++) {
            memviewslice->strides[i] = buf->strides[i];
        }
    } else {
        /* No explicit strides: synthesize them for a contiguous buffer. */
        Py_ssize_t stride = buf->itemsize;
        for (i = ndim - 1; i >= 0; i--) {
            memviewslice->strides[i] = stride;
            stride *= buf->shape[i];
        }
    }
    for (i = 0; i < ndim; i++) {
        memviewslice->shape[i] = buf->shape[i];
        if (buf->suboffsets) {
            memviewslice->suboffsets[i] = buf->suboffsets[i];
        } else {
            /* -1 is the buffer protocol's "no suboffset" marker. */
            memviewslice->suboffsets[i] = -1;
        }
    }
    memviewslice->memview = memview;
    memviewslice->data = (char *)buf->buf;
    /* Only the very first acquisition needs to pin the memoryview, and only
     * when the caller is not handing over a reference it already owns. */
    if (__pyx_add_acquisition_count(memview) == 0 && !memview_is_new_reference) {
        Py_INCREF(memview);
    }
    retval = 0;
    goto no_fail;
fail:
    memviewslice->memview = 0;
    memviewslice->data = 0;
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
/* Py_NO_RETURN may be absent on older CPython headers; degrade to nothing. */
#ifndef Py_NO_RETURN
#define Py_NO_RETURN
#endif
/* printf-style wrapper around Py_FatalError: formats into a fixed 200-byte
 * buffer (vsnprintf truncates) and aborts the process; never returns. */
static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
    va_list vargs;
    char msg[200];
#ifdef HAVE_STDARG_PROTOTYPES
    va_start(vargs, fmt);
#else
    /* K&R-style varargs fallback. */
    va_start(vargs);
#endif
    vsnprintf(msg, 200, fmt, vargs);
    va_end(vargs);
    Py_FatalError(msg);
}
/* Post-increment the slice acquisition counter under `lock`; returns the
 * value the counter held before the increment. */
static CYTHON_INLINE int
__pyx_add_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
                                   PyThread_type_lock lock)
{
    int previous;
    PyThread_acquire_lock(lock, 1);
    previous = (*acquisition_count)++;
    PyThread_release_lock(lock);
    return previous;
}
/* Post-decrement the slice acquisition counter under `lock`; returns the
 * value the counter held before the decrement. */
static CYTHON_INLINE int
__pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
                                   PyThread_type_lock lock)
{
    int previous;
    PyThread_acquire_lock(lock, 1);
    previous = (*acquisition_count)--;
    PyThread_release_lock(lock);
    return previous;
}
/* Register one more acquisition of `memslice`.  A NULL or None memview is a
 * no-op.  The very first acquisition takes a strong reference to the
 * memoryview object, acquiring the GIL first if the caller does not hold it.
 * A negative count indicates refcounting corruption and is fatal. */
static CYTHON_INLINE void
__Pyx_INC_MEMVIEW(__Pyx_memviewslice *memslice, int have_gil, int lineno)
{
    int first_time;
    struct __pyx_memoryview_obj *memview = memslice->memview;
    if (!memview || (PyObject *) memview == Py_None)
        return;
    if (__pyx_get_slice_count(memview) < 0)
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);
    first_time = __pyx_add_acquisition_count(memview) == 0;
    if (first_time) {
        if (have_gil) {
            Py_INCREF((PyObject *) memview);
        } else {
            /* Py_INCREF requires the GIL; grab it just for the incref. */
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_INCREF((PyObject *) memview);
            PyGILState_Release(_gilstate);
        }
    }
}
/* Release one acquisition of `memslice`.  NULL memview is a no-op; a None
 * memview is just cleared.  When the last acquisition is released, the
 * strong reference to the memoryview is dropped (taking the GIL if needed);
 * otherwise only the slice's pointers are cleared.  A non-positive count
 * before the decrement is fatal (double release). */
static CYTHON_INLINE void __Pyx_XDEC_MEMVIEW(__Pyx_memviewslice *memslice,
                                             int have_gil, int lineno) {
    int last_time;
    struct __pyx_memoryview_obj *memview = memslice->memview;
    if (!memview ) {
        return;
    } else if ((PyObject *) memview == Py_None) {
        memslice->memview = NULL;
        return;
    }
    if (__pyx_get_slice_count(memview) <= 0)
        __pyx_fatalerror("Acquisition count is %d (line %d)",
                         __pyx_get_slice_count(memview), lineno);
    last_time = __pyx_sub_acquisition_count(memview) == 1;
    memslice->data = NULL;
    if (last_time) {
        if (have_gil) {
            Py_CLEAR(memslice->memview);
        } else {
            /* Py_CLEAR may run the destructor, which requires the GIL. */
            PyGILState_STATE _gilstate = PyGILState_Ensure();
            Py_CLEAR(memslice->memview);
            PyGILState_Release(_gilstate);
        }
    } else {
        memslice->memview = NULL;
    }
}
/* PyDictVersioning */
/* Helpers for CPython's dict version tags, used to cache attribute lookups:
 * a lookup result stays valid as long as the relevant dict versions match. */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
/* Version tag of the object's type dict (0 when the type has no dict). */
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
    PyObject *dict = Py_TYPE(obj)->tp_dict;
    return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
}
/* Version tag of the instance __dict__ (0 when absent or empty slot). */
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
    PyObject **dictptr = NULL;
    Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
    if (offset) {
#if CYTHON_COMPILING_IN_CPYTHON
        /* Positive offsets locate the dict slot directly inside the object;
         * negative offsets need the generic resolver. */
        dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
#else
        dictptr = _PyObject_GetDictPtr(obj);
#endif
    }
    return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
}
/* True iff both the type-dict and instance-dict versions still match the
 * cached tags, i.e. the cached lookup result can be reused. */
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
    PyObject *dict = Py_TYPE(obj)->tp_dict;
    if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
        return 0;
    return obj_dict_version == __Pyx_get_object_dict_version(obj);
}
#endif
/* GetModuleGlobalName */
/* Look up `name` in the module dict (__pyx_d); fall back to builtins via
 * __Pyx_GetBuiltinName when not found.  Returns a new reference or NULL with
 * an exception set.  With dict versioning enabled, the result and the dict
 * version are recorded in the caller-provided cache slots. */
#if CYTHON_USE_DICT_VERSIONS
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
#else
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
#endif
{
    PyObject *result;
#if !CYTHON_AVOID_BORROWED_REFS
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
    /* `name` is interned, so its hash is precomputed; skip rehashing. */
    result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    } else if (unlikely(PyErr_Occurred())) {
        /* Distinguish "missing" from a real lookup error. */
        return NULL;
    }
#else
    result = PyDict_GetItem(__pyx_d, name);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    }
#endif
#else
    /* Borrowed refs disallowed: use the generic protocol (new reference). */
    result = PyObject_GetItem(__pyx_d, name);
    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
    if (likely(result)) {
        return __Pyx_NewRef(result);
    }
    PyErr_Clear();
#endif
    return __Pyx_GetBuiltinName(name);
}
/* PyFunctionFastCall */
/* Evaluate code object `co` with `na` positional arguments and no keywords
 * by building a frame directly and copying the args into its fastlocals.
 * Returns a new reference or NULL on error. */
#if CYTHON_FAST_PYCALL
static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na,
                                               PyObject *globals) {
    PyFrameObject *f;
    PyThreadState *tstate = __Pyx_PyThreadState_Current;
    PyObject **fastlocals;
    Py_ssize_t i;
    PyObject *result;
    assert(globals != NULL);
    /* XXX Perhaps we should create a specialized
       PyFrame_New() that doesn't take locals, but does
       take builtins without sanity checking them.
       */
    assert(tstate != NULL);
    f = PyFrame_New(tstate, co, globals, NULL);
    if (f == NULL) {
        return NULL;
    }
    fastlocals = __Pyx_PyFrame_GetLocalsplus(f);
    /* The frame owns one reference to each argument. */
    for (i = 0; i < na; i++) {
        Py_INCREF(*args);
        fastlocals[i] = *args++;
    }
    result = PyEval_EvalFrameEx(f,0);
    /* NOTE(review): recursion_depth is bumped around the frame DECREF,
     * presumably so frame deallocation cannot trip the recursion limit —
     * mirrors CPython's internal fast-call helper; confirm against the
     * targeted CPython version. */
    ++tstate->recursion_depth;
    Py_DECREF(f);
    --tstate->recursion_depth;
    return result;
}
/* Call a pure-Python function with a C array of positional args and an
 * optional kwargs dict.  Takes the no-frame-overhead fast path via
 * __Pyx_PyFunction_FastCallNoKw when the code object is simple (no keywords,
 * no defaults needed or defaults fill everything); otherwise flattens kwargs
 * into a tuple and falls back to PyEval_EvalCodeEx.
 * Returns a new reference or NULL on error. */
#if 1 || PY_VERSION_HEX < 0x030600B1
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
    PyObject *globals = PyFunction_GET_GLOBALS(func);
    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
    PyObject *closure;
#if PY_MAJOR_VERSION >= 3
    PyObject *kwdefs;
#endif
    PyObject *kwtuple, **k;
    PyObject **d;
    Py_ssize_t nd;
    Py_ssize_t nk;
    PyObject *result;
    assert(kwargs == NULL || PyDict_Check(kwargs));
    nk = kwargs ? PyDict_Size(kwargs) : 0;
    if (Py_EnterRecursiveCall((char*)" while calling a Python object")) {
        return NULL;
    }
    /* Fast path: plain function body (no cells, no free vars) called with
     * purely positional arguments. */
    if (
#if PY_MAJOR_VERSION >= 3
            co->co_kwonlyargcount == 0 &&
#endif
            likely(kwargs == NULL || nk == 0) &&
            co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
        if (argdefs == NULL && co->co_argcount == nargs) {
            result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals);
            goto done;
        }
        else if (nargs == 0 && argdefs != NULL
                 && co->co_argcount == Py_SIZE(argdefs)) {
            /* function called with no arguments, but all parameters have
               a default value: use default values as arguments .*/
            args = &PyTuple_GET_ITEM(argdefs, 0);
            result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals);
            goto done;
        }
    }
    if (kwargs != NULL) {
        Py_ssize_t pos, i;
        /* Flatten the kwargs dict into [key0, val0, key1, val1, ...] as
         * expected by PyEval_EvalCodeEx. */
        kwtuple = PyTuple_New(2 * nk);
        if (kwtuple == NULL) {
            result = NULL;
            goto done;
        }
        k = &PyTuple_GET_ITEM(kwtuple, 0);
        pos = i = 0;
        while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) {
            Py_INCREF(k[i]);
            Py_INCREF(k[i+1]);
            i += 2;
        }
        nk = i / 2;
    }
    else {
        kwtuple = NULL;
        k = NULL;
    }
    closure = PyFunction_GET_CLOSURE(func);
#if PY_MAJOR_VERSION >= 3
    kwdefs = PyFunction_GET_KW_DEFAULTS(func);
#endif
    if (argdefs != NULL) {
        d = &PyTuple_GET_ITEM(argdefs, 0);
        nd = Py_SIZE(argdefs);
    }
    else {
        d = NULL;
        nd = 0;
    }
#if PY_MAJOR_VERSION >= 3
    result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
                               args, (int)nargs,
                               k, (int)nk,
                               d, (int)nd, kwdefs, closure);
#else
    result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
                               args, (int)nargs,
                               k, (int)nk,
                               d, (int)nd, closure);
#endif
    Py_XDECREF(kwtuple);
done:
    Py_LeaveRecursiveCall();
    return result;
}
#endif
#endif
/* PyCFunctionFastCall */
/* Invoke a METH_FASTCALL C function with a C array of arguments, bypassing
 * tuple packing.  Dispatches on whether the method also takes keywords
 * (METH_FASTCALL|METH_KEYWORDS uses the 4-argument calling convention).
 * Must not be entered with a pending exception. */
#if CYTHON_FAST_PYCCALL
static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) {
    PyCFunctionObject *func = (PyCFunctionObject*)func_obj;
    PyCFunction meth = PyCFunction_GET_FUNCTION(func);
    PyObject *self = PyCFunction_GET_SELF(func);
    int flags = PyCFunction_GET_FLAGS(func);
    assert(PyCFunction_Check(func));
    assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));
    assert(nargs >= 0);
    assert(nargs == 0 || args != NULL);
    /* _PyCFunction_FastCallDict() must not be called with an exception set,
       because it may clear it (directly or indirectly) and so the
       caller loses its exception */
    assert(!PyErr_Occurred());
    if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
        return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);
    } else {
        return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);
    }
}
#endif
/* PyObjectCallMethO */
/* Call a METH_O C function directly with its single argument, skipping the
 * generic call machinery.  Promotes a NULL result with no exception set to
 * an explicit SystemError.  Returns a new reference or NULL on error. */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) {
    PyObject *self, *result;
    PyCFunction cfunc;
    cfunc = PyCFunction_GET_FUNCTION(func);
    self = PyCFunction_GET_SELF(func);
    if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
        return NULL;
    result = cfunc(self, arg);
    Py_LeaveRecursiveCall();
    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
        PyErr_SetString(
            PyExc_SystemError,
            "NULL result without error in PyObject_Call");
    }
    return result;
}
#endif
/* PyObjectCallOneArg */
/* Call `func` with exactly one argument, choosing the cheapest available
 * calling convention: fast-call for Python functions, METH_O / METH_FASTCALL
 * for builtins, otherwise a 1-tuple via __Pyx_PyObject_Call.
 * Returns a new reference or NULL on error. */
#if CYTHON_COMPILING_IN_CPYTHON
/* Generic fallback: pack `arg` into a fresh 1-tuple and call. */
static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) {
    PyObject *result;
    PyObject *args = PyTuple_New(1);
    if (unlikely(!args)) return NULL;
    Py_INCREF(arg);
    PyTuple_SET_ITEM(args, 0, arg);
    result = __Pyx_PyObject_Call(func, args, NULL);
    Py_DECREF(args);
    return result;
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
#if CYTHON_FAST_PYCALL
    if (PyFunction_Check(func)) {
        return __Pyx_PyFunction_FastCall(func, &arg, 1);
    }
#endif
    if (likely(PyCFunction_Check(func))) {
        if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
            /* METH_O takes the bare argument; no tuple needed. */
            return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
        } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
            return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
        }
    }
    return __Pyx__PyObject_CallOneArg(func, arg);
}
#else
/* Non-CPython build: always go through the generic protocol. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
    PyObject *result;
    PyObject *args = PyTuple_Pack(1, arg);
    if (unlikely(!args)) return NULL;
    result = __Pyx_PyObject_Call(func, args, NULL);
    Py_DECREF(args);
    return result;
}
#endif
/* GetItemInt */
/* Generic subscript fallback: `j` is a freshly created index object (may be
 * NULL if its creation failed).  Consumes the reference to `j` and returns
 * a new reference to o[j], or NULL on error. */
static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
    PyObject *item;
    if (j == NULL)
        return NULL;
    item = PyObject_GetItem(o, j);
    Py_DECREF(j);
    return item;
}
/* list[i] with optional wraparound (negative indices) and bounds checking.
 * On CPython with safe macros, indexes the ob_item array directly; an
 * out-of-bounds index falls through to the generic path so the standard
 * IndexError is raised.  Returns a new reference or NULL on error. */
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,
                                                              CYTHON_NCP_UNUSED int wraparound,
                                                              CYTHON_NCP_UNUSED int boundscheck) {
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
    Py_ssize_t wrapped_i = i;
    if (wraparound & unlikely(i < 0)) {
        wrapped_i += PyList_GET_SIZE(o);
    }
    if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) {
        /* PyList_GET_ITEM returns a borrowed reference; make it owned. */
        PyObject *r = PyList_GET_ITEM(o, wrapped_i);
        Py_INCREF(r);
        return r;
    }
    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
#else
    return PySequence_GetItem(o, i);
#endif
}
/* tuple[i] counterpart of __Pyx_GetItemInt_List_Fast: direct item access
 * with optional wraparound/bounds check, generic fallback otherwise.
 * Returns a new reference or NULL on error. */
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,
                                                              CYTHON_NCP_UNUSED int wraparound,
                                                              CYTHON_NCP_UNUSED int boundscheck) {
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS
    Py_ssize_t wrapped_i = i;
    if (wraparound & unlikely(i < 0)) {
        wrapped_i += PyTuple_GET_SIZE(o);
    }
    if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) {
        /* Borrowed reference from the macro; promote to owned. */
        PyObject *r = PyTuple_GET_ITEM(o, wrapped_i);
        Py_INCREF(r);
        return r;
    }
    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
#else
    return PySequence_GetItem(o, i);
#endif
}
/* o[i] for an arbitrary object with a C integer index.  Special-cases exact
 * lists and tuples (direct item access), then any type with sq_item (with
 * manual wraparound via sq_length), finally the generic subscript protocol.
 * `is_list` lets the caller assert list-ness statically.
 * Returns a new reference or NULL on error. */
static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list,
                                                     CYTHON_NCP_UNUSED int wraparound,
                                                     CYTHON_NCP_UNUSED int boundscheck) {
#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS
    if (is_list || PyList_CheckExact(o)) {
        Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);
        if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) {
            PyObject *r = PyList_GET_ITEM(o, n);
            Py_INCREF(r);
            return r;
        }
    }
    else if (PyTuple_CheckExact(o)) {
        Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o);
        if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) {
            PyObject *r = PyTuple_GET_ITEM(o, n);
            Py_INCREF(r);
            return r;
        }
    } else {
        PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;
        if (likely(m && m->sq_item)) {
            if (wraparound && unlikely(i < 0) && likely(m->sq_length)) {
                Py_ssize_t l = m->sq_length(o);
                if (likely(l >= 0)) {
                    i += l;
                } else {
                    /* sq_length failed: only an OverflowError is tolerated
                     * (sq_item will then handle the raw negative index). */
                    if (!PyErr_ExceptionMatches(PyExc_OverflowError))
                        return NULL;
                    PyErr_Clear();
                }
            }
            return m->sq_item(o, i);
        }
    }
#else
    if (is_list || PySequence_Check(o)) {
        return PySequence_GetItem(o, i);
    }
#endif
    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
}
/* ObjectGetItem */
/* obj[key] helpers used when type slots are available: prefer mp_subscript,
 * fall back to integer indexing through sq_item. */
#if CYTHON_USE_TYPE_SLOTS
/* Index `obj` by an object implementing __index__.  Converts the index to
 * Py_ssize_t; an OverflowError becomes the standard IndexError message.
 * Returns a new reference or NULL on error. */
static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject* index) {
    PyObject *runerr;
    Py_ssize_t key_value;
    PySequenceMethods *m = Py_TYPE(obj)->tp_as_sequence;
    if (unlikely(!(m && m->sq_item))) {
        PyErr_Format(PyExc_TypeError, "'%.200s' object is not subscriptable", Py_TYPE(obj)->tp_name);
        return NULL;
    }
    key_value = __Pyx_PyIndex_AsSsize_t(index);
    /* -1 may be a valid index or the error sentinel; disambiguate. */
    if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
        return __Pyx_GetItemInt_Fast(obj, key_value, 0, 1, 1);
    }
    if (PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) {
        PyErr_Clear();
        PyErr_Format(PyExc_IndexError, "cannot fit '%.200s' into an index-sized integer", Py_TYPE(index)->tp_name);
    }
    return NULL;
}
/* Generic obj[key]: mapping protocol first, then sequence indexing. */
static PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* key) {
    PyMappingMethods *m = Py_TYPE(obj)->tp_as_mapping;
    if (likely(m && m->mp_subscript)) {
        return m->mp_subscript(obj, key);
    }
    return __Pyx_PyObject_GetIndex(obj, key);
}
#endif
/* PyIntBinop */
/* Optimized `op1 + intval` where op2 is the boxed form of the compile-time
 * constant `intval`.  Fast paths: Py2 int with overflow check, small PyLong
 * (up to 4 digits read directly from ob_digit, escalating to long long when
 * long would overflow), and float.  Everything else — and any overflow —
 * falls back to the generic number protocol.
 * Returns a new reference or NULL on error. */
#if !CYTHON_COMPILING_IN_PYPY
static PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) {
    (void)inplace;
    (void)zerodivision_check;
    #if PY_MAJOR_VERSION < 3
    if (likely(PyInt_CheckExact(op1))) {
        const long b = intval;
        long x;
        long a = PyInt_AS_LONG(op1);
            /* Unsigned add avoids UB; sign test below detects overflow. */
            x = (long)((unsigned long)a + b);
            if (likely((x^a) >= 0 || (x^b) >= 0))
                return PyInt_FromLong(x);
            return PyLong_Type.tp_as_number->nb_add(op1, op2);
    }
    #endif
    #if CYTHON_USE_PYLONG_INTERNALS
    if (likely(PyLong_CheckExact(op1))) {
        const long b = intval;
        long a, x;
#ifdef HAVE_LONG_LONG
        const PY_LONG_LONG llb = intval;
        PY_LONG_LONG lla, llx;
#endif
        const digit* digits = ((PyLongObject*)op1)->ob_digit;
        const Py_ssize_t size = Py_SIZE(op1);
        /* |size| is the digit count; its sign is the number's sign. */
        if (likely(__Pyx_sst_abs(size) <= 1)) {
            a = likely(size) ? digits[0] : 0;
            if (size == -1) a = -a;
        } else {
            switch (size) {
                /* Reassemble 2–4 base-2^PyLong_SHIFT digits into a native
                 * integer, using long long when long is too narrow. */
                case -2:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 2:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case -3:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 3:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case -4:
                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                        a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 4:
                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                        a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                /* 5+ digits: too big for the inline path. */
                default: return PyLong_Type.tp_as_number->nb_add(op1, op2);
            }
        }
                x = a + b;
            return PyLong_FromLong(x);
#ifdef HAVE_LONG_LONG
        long_long:
                llx = lla + llb;
            return PyLong_FromLongLong(llx);
#endif
    }
    #endif
    if (PyFloat_CheckExact(op1)) {
        const long b = intval;
        double a = PyFloat_AS_DOUBLE(op1);
            double result;
            PyFPE_START_PROTECT("add", return NULL)
            result = ((double)a) + (double)b;
            PyFPE_END_PROTECT(result)
            return PyFloat_FromDouble(result);
    }
    return (inplace ? PyNumber_InPlaceAdd : PyNumber_Add)(op1, op2);
}
#endif
/* PyIntBinop */
/* Optimized `op1 - intval` where op2 is the boxed form of the compile-time
 * constant `intval`.  Mirrors __Pyx_PyInt_AddObjC: Py2 int with overflow
 * check, small PyLong decoded inline from ob_digit (long long escalation),
 * float fast path, generic PyNumber_Subtract fallback.
 * Returns a new reference or NULL on error. */
#if !CYTHON_COMPILING_IN_PYPY
static PyObject* __Pyx_PyInt_SubtractObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) {
    (void)inplace;
    (void)zerodivision_check;
    #if PY_MAJOR_VERSION < 3
    if (likely(PyInt_CheckExact(op1))) {
        const long b = intval;
        long x;
        long a = PyInt_AS_LONG(op1);
            /* Unsigned subtract avoids UB; sign test (note ~b) detects
             * overflow for subtraction. */
            x = (long)((unsigned long)a - b);
            if (likely((x^a) >= 0 || (x^~b) >= 0))
                return PyInt_FromLong(x);
            return PyLong_Type.tp_as_number->nb_subtract(op1, op2);
    }
    #endif
    #if CYTHON_USE_PYLONG_INTERNALS
    if (likely(PyLong_CheckExact(op1))) {
        const long b = intval;
        long a, x;
#ifdef HAVE_LONG_LONG
        const PY_LONG_LONG llb = intval;
        PY_LONG_LONG lla, llx;
#endif
        const digit* digits = ((PyLongObject*)op1)->ob_digit;
        const Py_ssize_t size = Py_SIZE(op1);
        /* |size| is the digit count; its sign is the number's sign. */
        if (likely(__Pyx_sst_abs(size) <= 1)) {
            a = likely(size) ? digits[0] : 0;
            if (size == -1) a = -a;
        } else {
            switch (size) {
                /* Reassemble 2–4 base-2^PyLong_SHIFT digits into a native
                 * integer, using long long when long is too narrow. */
                case -2:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 2:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case -3:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 3:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case -4:
                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                        a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {
                        lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                case 4:
                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                        a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
                        break;
#ifdef HAVE_LONG_LONG
                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {
                        lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));
                        goto long_long;
#endif
                    }
                    CYTHON_FALLTHROUGH;
                /* 5+ digits: too big for the inline path. */
                default: return PyLong_Type.tp_as_number->nb_subtract(op1, op2);
            }
        }
                x = a - b;
            return PyLong_FromLong(x);
#ifdef HAVE_LONG_LONG
        long_long:
                llx = lla - llb;
            return PyLong_FromLongLong(llx);
#endif
    }
    #endif
    if (PyFloat_CheckExact(op1)) {
        const long b = intval;
        double a = PyFloat_AS_DOUBLE(op1);
            double result;
            PyFPE_START_PROTECT("subtract", return NULL)
            result = ((double)a) - (double)b;
            PyFPE_END_PROTECT(result)
            return PyFloat_FromDouble(result);
    }
    return (inplace ? PyNumber_InPlaceSubtract : PyNumber_Subtract)(op1, op2);
}
#endif
/* ArgTypeTest */
/* Validate that argument `obj` matches `type`; `exact` forbids subclasses.
 * Returns 1 on success; on failure sets a TypeError naming the argument
 * and returns 0.  (The exact Py2 basestring case is the only accepting
 * branch under `exact` visible here; other exact matches are assumed to be
 * handled by the caller before reaching this helper — TODO confirm against
 * the generated call sites.) */
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)
{
    if (unlikely(!type)) {
        PyErr_SetString(PyExc_SystemError, "Missing type object");
        return 0;
    }
    else if (exact) {
        #if PY_MAJOR_VERSION == 2
        if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;
        #endif
    }
    else {
        if (likely(__Pyx_TypeCheck(obj, type))) return 1;
    }
    PyErr_Format(PyExc_TypeError,
        "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)",
        name, type->tp_name, Py_TYPE(obj)->tp_name);
    return 0;
}
/* DictGetItem */
/* d[key] with dict semantics: raises KeyError on a missing key instead of
 * returning NULL silently.  Tuple keys are wrapped in a 1-tuple first,
 * presumably so PyErr_SetObject does not interpret the tuple as multiple
 * exception arguments — standard CPython behavior; confirm if touched.
 * Returns a new reference or NULL with an exception set. */
#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {
    PyObject *value;
    value = PyDict_GetItemWithError(d, key);
    if (unlikely(!value)) {
        if (!PyErr_Occurred()) {
            if (unlikely(PyTuple_Check(key))) {
                PyObject* args = PyTuple_Pack(1, key);
                if (likely(args)) {
                    PyErr_SetObject(PyExc_KeyError, args);
                    Py_DECREF(args);
                }
            } else {
                PyErr_SetObject(PyExc_KeyError, key);
            }
        }
        return NULL;
    }
    /* PyDict_GetItemWithError returns a borrowed reference. */
    Py_INCREF(value);
    return value;
}
#endif
/* RaiseTooManyValuesToUnpack */
/* Raise ValueError: the unpacked iterable yielded more than `expected`. */
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
    const char *fmt =
        "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)";
    PyErr_Format(PyExc_ValueError, fmt, expected);
}
/* RaiseNeedMoreValuesToUnpack */
/* Raise ValueError: the unpacked iterable ran out after `index` items. */
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
    const char *plural_suffix = (index == 1) ? "" : "s";
    PyErr_Format(PyExc_ValueError,
                 "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack",
                 index, plural_suffix);
}
/* RaiseNoneIterError */
/* Raise TypeError for an attempt to iterate/unpack None. */
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
    const char *message = "'NoneType' object is not iterable";
    PyErr_SetString(PyExc_TypeError, message);
}
/* ExtTypeTest */
/* Check that `obj` is an instance of extension type `type` (subclasses
 * allowed).  Returns 1 on success; on failure sets SystemError (missing
 * type) or TypeError (wrong type) and returns 0. */
static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
    if (unlikely(!type)) {
        PyErr_SetString(PyExc_SystemError, "Missing type object");
        return 0;
    }
    if (!__Pyx_TypeCheck(obj, type)) {
        PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s",
                     Py_TYPE(obj)->tp_name, type->tp_name);
        return 0;
    }
    return 1;
}
/* GetTopmostException */
/* Walk up the thread state's exc_info stack past empty/None frames and
 * return the innermost entry that actually holds an exception (or the
 * outermost entry if none does).  Returns a borrowed pointer into tstate. */
#if CYTHON_USE_EXC_INFO_STACK
static _PyErr_StackItem *
__Pyx_PyErr_GetTopmostException(PyThreadState *tstate)
{
    _PyErr_StackItem *exc_info = tstate->exc_info;
    while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) &&
           exc_info->previous_item != NULL)
    {
        exc_info = exc_info->previous_item;
    }
    return exc_info;
}
#endif
/* SaveResetException */
/* Save/restore of the "currently handled" exception (sys.exc_info), using
 * the fast thread-state fields directly. */
#if CYTHON_FAST_THREAD_STATE
/* Copy the handled exception triple into *type/*value/*tb, each with a new
 * (possibly NULL) reference.  Does not clear the thread state. */
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
    #if CYTHON_USE_EXC_INFO_STACK
    _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate);
    *type = exc_info->exc_type;
    *value = exc_info->exc_value;
    *tb = exc_info->exc_traceback;
    #else
    *type = tstate->exc_type;
    *value = tstate->exc_value;
    *tb = tstate->exc_traceback;
    #endif
    Py_XINCREF(*type);
    Py_XINCREF(*value);
    Py_XINCREF(*tb);
}
/* Install type/value/tb as the handled exception, stealing the caller's
 * references; the previous triple is released only after the swap so the
 * state is never left inconsistent. */
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    #if CYTHON_USE_EXC_INFO_STACK
    _PyErr_StackItem *exc_info = tstate->exc_info;
    tmp_type = exc_info->exc_type;
    tmp_value = exc_info->exc_value;
    tmp_tb = exc_info->exc_traceback;
    exc_info->exc_type = type;
    exc_info->exc_value = value;
    exc_info->exc_traceback = tb;
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    #endif
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
}
#endif
/* PyErrExceptionMatches */
/* Fast PyErr_ExceptionMatches using the thread state's curexc_type. */
#if CYTHON_FAST_THREAD_STATE
/* Match against each element of an exception tuple.  The identity-only
 * first pass (Py3) catches the common exact-match case before the costlier
 * subclass checks. */
static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
    Py_ssize_t i, n;
    n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
    for (i=0; i<n; i++) {
        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
    }
#endif
    for (i=0; i<n; i++) {
        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
    }
    return 0;
}
/* True iff the pending exception in `tstate` matches `err` (a class or
 * tuple of classes); false when no exception is pending. */
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
    PyObject *exc_type = tstate->curexc_type;
    if (exc_type == err) return 1;
    if (unlikely(!exc_type)) return 0;
    if (unlikely(PyTuple_Check(err)))
        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
}
#endif
/* GetException */
/* Fetch and normalize the pending exception, hand it to the caller in
 * *type/*value/*tb (new references), and also install it as the "currently
 * handled" exception (sys.exc_info) — the equivalent of entering an
 * `except` block.  Returns 0 on success, -1 on failure with outputs
 * zeroed. */
#if CYTHON_FAST_THREAD_STATE
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb)
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)
#endif
{
    PyObject *local_type, *local_value, *local_tb;
#if CYTHON_FAST_THREAD_STATE
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    /* Steal the pending exception directly out of the thread state. */
    local_type = tstate->curexc_type;
    local_value = tstate->curexc_value;
    local_tb = tstate->curexc_traceback;
    tstate->curexc_type = 0;
    tstate->curexc_value = 0;
    tstate->curexc_traceback = 0;
#else
    PyErr_Fetch(&local_type, &local_value, &local_tb);
#endif
    PyErr_NormalizeException(&local_type, &local_value, &local_tb);
#if CYTHON_FAST_THREAD_STATE
    /* Normalization itself may have raised. */
    if (unlikely(tstate->curexc_type))
#else
    if (unlikely(PyErr_Occurred()))
#endif
        goto bad;
#if PY_MAJOR_VERSION >= 3
    if (local_tb) {
        if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
            goto bad;
    }
#endif
    /* One set of references for the caller's out-parameters... */
    Py_XINCREF(local_tb);
    Py_XINCREF(local_type);
    Py_XINCREF(local_value);
    *type = local_type;
    *value = local_value;
    *tb = local_tb;
    /* ...and the original references are moved into exc_info, replacing
     * (then releasing) the previously handled exception. */
#if CYTHON_FAST_THREAD_STATE
    #if CYTHON_USE_EXC_INFO_STACK
    {
        _PyErr_StackItem *exc_info = tstate->exc_info;
        tmp_type = exc_info->exc_type;
        tmp_value = exc_info->exc_value;
        tmp_tb = exc_info->exc_traceback;
        exc_info->exc_type = local_type;
        exc_info->exc_value = local_value;
        exc_info->exc_traceback = local_tb;
    }
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = local_type;
    tstate->exc_value = local_value;
    tstate->exc_traceback = local_tb;
    #endif
    Py_XDECREF(tmp_type);
    Py_XDECREF(tmp_value);
    Py_XDECREF(tmp_tb);
#else
    PyErr_SetExcInfo(local_type, local_value, local_tb);
#endif
    return 0;
bad:
    *type = 0;
    *value = 0;
    *tb = 0;
    Py_XDECREF(local_type);
    Py_XDECREF(local_value);
    Py_XDECREF(local_tb);
    return -1;
}
/* PyObjectCall2Args */
/* Call `function` with exactly two positional arguments, using the fast
 * stack-based conventions when available and a packed 2-tuple otherwise.
 * Returns a new reference or NULL on error. */
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) {
    PyObject *args, *result = NULL;
    #if CYTHON_FAST_PYCALL
    if (PyFunction_Check(function)) {
        PyObject *args[2] = {arg1, arg2};
        return __Pyx_PyFunction_FastCall(function, args, 2);
    }
    #endif
    #if CYTHON_FAST_PYCCALL
    if (__Pyx_PyFastCFunction_Check(function)) {
        PyObject *args[2] = {arg1, arg2};
        return __Pyx_PyCFunction_FastCall(function, args, 2);
    }
    #endif
    args = PyTuple_New(2);
    if (unlikely(!args)) goto done;
    /* The tuple steals references; incref so the caller's refs survive. */
    Py_INCREF(arg1);
    PyTuple_SET_ITEM(args, 0, arg1);
    Py_INCREF(arg2);
    PyTuple_SET_ITEM(args, 1, arg2);
    Py_INCREF(function);
    result = __Pyx_PyObject_Call(function, args, NULL);
    Py_DECREF(args);
    Py_DECREF(function);
done:
    return result;
}
/* BytesEquals */
/* Equality/inequality (`equals` is Py_EQ or Py_NE) between two objects with
 * a fast path for exact bytes: length check, first-byte check, cached-hash
 * check, then memcmp.  Non-bytes operands fall back to rich comparison.
 * Returns 1/0, or -1 on error. */
static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {
#if CYTHON_COMPILING_IN_PYPY
    return PyObject_RichCompareBool(s1, s2, equals);
#else
    if (s1 == s2) {
        /* Identical objects are equal by definition. */
        return (equals == Py_EQ);
    } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {
        const char *ps1, *ps2;
        Py_ssize_t length = PyBytes_GET_SIZE(s1);
        if (length != PyBytes_GET_SIZE(s2))
            return (equals == Py_NE);
        ps1 = PyBytes_AS_STRING(s1);
        ps2 = PyBytes_AS_STRING(s2);
        if (ps1[0] != ps2[0]) {
            return (equals == Py_NE);
        } else if (length == 1) {
            return (equals == Py_EQ);
        } else {
            int result;
#if CYTHON_USE_UNICODE_INTERNALS
            /* Differing cached hashes prove inequality without a memcmp
             * (-1 means the hash has not been computed yet). */
            Py_hash_t hash1, hash2;
            hash1 = ((PyBytesObject*)s1)->ob_shash;
            hash2 = ((PyBytesObject*)s2)->ob_shash;
            if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
                return (equals == Py_NE);
            }
#endif
            result = memcmp(ps1, ps2, (size_t)length);
            return (equals == Py_EQ) ? (result == 0) : (result != 0);
        }
    } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
        return (equals == Py_NE);
    } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {
        return (equals == Py_NE);
    } else {
        int result;
        PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
        if (!py_result)
            return -1;
        result = __Pyx_PyObject_IsTrue(py_result);
        Py_DECREF(py_result);
        return result;
    }
#endif
}
/* UnicodeEquals */
static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
#if CYTHON_COMPILING_IN_PYPY
return PyObject_RichCompareBool(s1, s2, equals);
#else
#if PY_MAJOR_VERSION < 3
PyObject* owned_ref = NULL;
#endif
int s1_is_unicode, s2_is_unicode;
if (s1 == s2) {
goto return_eq;
}
s1_is_unicode = PyUnicode_CheckExact(s1);
s2_is_unicode = PyUnicode_CheckExact(s2);
#if PY_MAJOR_VERSION < 3
if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) {
owned_ref = PyUnicode_FromObject(s2);
if (unlikely(!owned_ref))
return -1;
s2 = owned_ref;
s2_is_unicode = 1;
} else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) {
owned_ref = PyUnicode_FromObject(s1);
if (unlikely(!owned_ref))
return -1;
s1 = owned_ref;
s1_is_unicode = 1;
} else if (((!s2_is_unicode) & (!s1_is_unicode))) {
return __Pyx_PyBytes_Equals(s1, s2, equals);
}
#endif
if (s1_is_unicode & s2_is_unicode) {
Py_ssize_t length;
int kind;
void *data1, *data2;
if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0))
return -1;
length = __Pyx_PyUnicode_GET_LENGTH(s1);
if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {
goto return_ne;
}
#if CYTHON_USE_UNICODE_INTERNALS
{
Py_hash_t hash1, hash2;
#if CYTHON_PEP393_ENABLED
hash1 = ((PyASCIIObject*)s1)->hash;
hash2 = ((PyASCIIObject*)s2)->hash;
#else
hash1 = ((PyUnicodeObject*)s1)->hash;
hash2 = ((PyUnicodeObject*)s2)->hash;
#endif
if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
goto return_ne;
}
}
#endif
kind = __Pyx_PyUnicode_KIND(s1);
if (kind != __Pyx_PyUnicode_KIND(s2)) {
goto return_ne;
}
data1 = __Pyx_PyUnicode_DATA(s1);
data2 = __Pyx_PyUnicode_DATA(s2);
if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) {
goto return_ne;
} else if (length == 1) {
goto return_eq;
} else {
int result = memcmp(data1, data2, (size_t)(length * kind));
#if PY_MAJOR_VERSION < 3
Py_XDECREF(owned_ref);
#endif
return (equals == Py_EQ) ? (result == 0) : (result != 0);
}
} else if ((s1 == Py_None) & s2_is_unicode) {
goto return_ne;
} else if ((s2 == Py_None) & s1_is_unicode) {
goto return_ne;
} else {
int result;
PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
#if PY_MAJOR_VERSION < 3
Py_XDECREF(owned_ref);
#endif
if (!py_result)
return -1;
result = __Pyx_PyObject_IsTrue(py_result);
Py_DECREF(py_result);
return result;
}
return_eq:
#if PY_MAJOR_VERSION < 3
Py_XDECREF(owned_ref);
#endif
return (equals == Py_EQ);
return_ne:
#if PY_MAJOR_VERSION < 3
Py_XDECREF(owned_ref);
#endif
return (equals == Py_NE);
#endif
}
/* None */
/* Floor division for Py_ssize_t. C's '/' truncates toward zero, so when a
 * nonzero remainder has the opposite sign to the divisor the quotient must
 * be adjusted down by one to match Python's floor-division semantics. */
static CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t a, Py_ssize_t b) {
    Py_ssize_t quot = a / b;
    Py_ssize_t rem = a - quot * b;
    if (rem != 0 && ((rem ^ b) < 0))
        quot -= 1;
    return quot;
}
/* GetAttr */
/* getattr(o, n): routes exact str/unicode names through the optimized
 * __Pyx_PyObject_GetAttrStr lookup when type slots are available,
 * otherwise falls back to the generic PyObject_GetAttr. */
static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) {
#if CYTHON_USE_TYPE_SLOTS
#if PY_MAJOR_VERSION >= 3
    if (likely(PyUnicode_Check(n)))
#else
    if (likely(PyString_Check(n)))
#endif
        return __Pyx_PyObject_GetAttrStr(o, n);
#endif
    return PyObject_GetAttr(o, n);
}
/* decode_c_string */
/* Decode the [start, stop) slice of a NUL-terminated C string into a
 * unicode object. Negative bounds are interpreted relative to strlen()
 * (computed only in that case). `decode_func` is an optional codec-specific
 * fast path; when NULL, PyUnicode_Decode is used with `encoding`/`errors`. */
static CYTHON_INLINE PyObject* __Pyx_decode_c_string(
         const char* cstring, Py_ssize_t start, Py_ssize_t stop,
         const char* encoding, const char* errors,
         PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) {
    Py_ssize_t length;
    if (unlikely((start < 0) | (stop < 0))) {
        size_t slen = strlen(cstring);
        if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) {
            PyErr_SetString(PyExc_OverflowError,
                            "c-string too long to convert to Python");
            return NULL;
        }
        length = (Py_ssize_t) slen;
        if (start < 0) {
            start += length;
            if (start < 0)
                start = 0;
        }
        if (stop < 0)
            stop += length;
    }
    length = stop - start;
    if (unlikely(length <= 0))
        /* NOTE(review): PyUnicode_FromUnicode is deprecated in CPython 3.3+;
           presumably kept here for compatibility of this generated code. */
        return PyUnicode_FromUnicode(NULL, 0);
    cstring += start;
    if (decode_func) {
        return decode_func(cstring, length, errors);
    } else {
        return PyUnicode_Decode(cstring, length, encoding, errors);
    }
}
/* GetAttr3 */
/* Helper for the default branch of getattr(o, n, d): returns a new
 * reference to `d` only when the pending exception is an AttributeError,
 * otherwise propagates the error. */
static PyObject *__Pyx_GetAttr3Default(PyObject *d) {
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
        return NULL;
    __Pyx_PyErr_Clear();
    Py_INCREF(d);
    return d;
}
/* Three-argument getattr(o, n, d). */
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
    PyObject *r = __Pyx_GetAttr(o, n);
    return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
}
/* SwapException */
/* Exchange the "currently handled" exception (sys.exc_info) with the
 * (*type, *value, *tb) triple, transferring ownership in both directions.
 * The fast variant pokes the thread state directly; the slow variant goes
 * through the public PyErr_{Get,Set}ExcInfo API. */
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    #if CYTHON_USE_EXC_INFO_STACK
    /* Python 3.7+: the handled exception lives on tstate->exc_info. */
    _PyErr_StackItem *exc_info = tstate->exc_info;
    tmp_type = exc_info->exc_type;
    tmp_value = exc_info->exc_value;
    tmp_tb = exc_info->exc_traceback;
    exc_info->exc_type = *type;
    exc_info->exc_value = *value;
    exc_info->exc_traceback = *tb;
    #else
    tmp_type = tstate->exc_type;
    tmp_value = tstate->exc_value;
    tmp_tb = tstate->exc_traceback;
    tstate->exc_type = *type;
    tstate->exc_value = *value;
    tstate->exc_traceback = *tb;
    #endif
    *type = tmp_type;
    *value = tmp_value;
    *tb = tmp_tb;
}
#else
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) {
    PyObject *tmp_type, *tmp_value, *tmp_tb;
    PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb);
    PyErr_SetExcInfo(*type, *value, *tb);
    *type = tmp_type;
    *value = tmp_value;
    *tb = tmp_tb;
}
#endif
/* Import */
/* Implement `import name` / `from name import from_list` with relative-
 * import level `level`. On Python 3 with level == -1, a package-relative
 * import is attempted first (level 1) and, if it raises ImportError, the
 * error is cleared and an absolute import (level 0) is tried. On Python 2
 * the builtin __import__ is called explicitly. Returns a new module
 * reference, or NULL with an exception set. */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
    PyObject *empty_list = 0;
    PyObject *module = 0;
    PyObject *global_dict = 0;
    PyObject *empty_dict = 0;
    PyObject *list;
    #if PY_MAJOR_VERSION < 3
    PyObject *py_import;
    py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);
    if (!py_import)
        goto bad;
    #endif
    if (from_list)
        list = from_list;
    else {
        /* __import__ requires a real (possibly empty) fromlist object. */
        empty_list = PyList_New(0);
        if (!empty_list)
            goto bad;
        list = empty_list;
    }
    global_dict = PyModule_GetDict(__pyx_m);  /* borrowed reference */
    if (!global_dict)
        goto bad;
    empty_dict = PyDict_New();
    if (!empty_dict)
        goto bad;
    {
        #if PY_MAJOR_VERSION >= 3
        if (level == -1) {
            /* Only try a relative import when this module is inside a package. */
            if (strchr(__Pyx_MODULE_NAME, '.')) {
                module = PyImport_ImportModuleLevelObject(
                    name, global_dict, empty_dict, list, 1);
                if (!module) {
                    if (!PyErr_ExceptionMatches(PyExc_ImportError))
                        goto bad;
                    PyErr_Clear();
                }
            }
            level = 0;
        }
        #endif
        if (!module) {
            #if PY_MAJOR_VERSION < 3
            PyObject *py_level = PyInt_FromLong(level);
            if (!py_level)
                goto bad;
            module = PyObject_CallFunctionObjArgs(py_import,
                name, global_dict, empty_dict, list, py_level, (PyObject *)NULL);
            Py_DECREF(py_level);
            #else
            module = PyImport_ImportModuleLevelObject(
                name, global_dict, empty_dict, list, level);
            #endif
        }
    }
bad:
    #if PY_MAJOR_VERSION < 3
    Py_XDECREF(py_import);
    #endif
    Py_XDECREF(empty_list);
    Py_XDECREF(empty_dict);
    return module;
}
/* FastTypeChecks */
/* CPython-only fast replacements for PyErr_GivenExceptionMatches and
 * friends: subtype tests walk the MRO tuple (or tp_base chain) directly
 * instead of calling PyObject_IsSubclass. */
#if CYTHON_COMPILING_IN_CPYTHON
/* Walk a's tp_base chain looking for b; everything implicitly derives
 * from object (PyBaseObject_Type). */
static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
    while (a) {
        a = a->tp_base;
        if (a == b)
            return 1;
    }
    return b == &PyBaseObject_Type;
}
/* issubclass(a, b) via the precomputed MRO when available, otherwise via
 * the tp_base chain. */
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
    PyObject *mro;
    if (a == b) return 1;
    mro = a->tp_mro;
    if (likely(mro)) {
        Py_ssize_t i, n;
        n = PyTuple_GET_SIZE(mro);
        for (i = 0; i < n; i++) {
            if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
                return 1;
        }
        return 0;
    }
    return __Pyx_InBases(a, b);
}
#if PY_MAJOR_VERSION == 2
/* Py2: old-style classes force a PyObject_IsSubclass call, which may run
 * arbitrary code — save/restore the pending exception around it. */
static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
    PyObject *exception, *value, *tb;
    int res;
    __Pyx_PyThreadState_declare
    __Pyx_PyThreadState_assign
    __Pyx_ErrFetch(&exception, &value, &tb);
    res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
    if (unlikely(res == -1)) {
        PyErr_WriteUnraisable(err);
        res = 0;
    }
    if (!res) {
        res = PyObject_IsSubclass(err, exc_type2);
        if (unlikely(res == -1)) {
            PyErr_WriteUnraisable(err);
            res = 0;
        }
    }
    __Pyx_ErrRestore(exception, value, tb);
    return res;
}
#else
/* Py3: exception classes are real types, so a plain subtype walk suffices. */
static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
    int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
    if (!res) {
        res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
    }
    return res;
}
#endif
/* Match exc_type against a tuple of exception classes: identity pass
 * first (cheap), then subclass checks. Non-class tuple entries are ignored. */
static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
    Py_ssize_t i, n;
    assert(PyExceptionClass_Check(exc_type));
    n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
    for (i=0; i<n; i++) {
        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
    }
#endif
    for (i=0; i<n; i++) {
        PyObject *t = PyTuple_GET_ITEM(tuple, i);
        #if PY_MAJOR_VERSION < 3
        if (likely(exc_type == t)) return 1;
        #endif
        if (likely(PyExceptionClass_Check(t))) {
            if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
        } else {
        }
    }
    return 0;
}
/* Drop-in fast path for PyErr_GivenExceptionMatches; falls back to the
 * CPython implementation for non-class, non-tuple exc_type values. */
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
    if (likely(err == exc_type)) return 1;
    if (likely(PyExceptionClass_Check(err))) {
        if (likely(PyExceptionClass_Check(exc_type))) {
            return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
        } else if (likely(PyTuple_Check(exc_type))) {
            return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
        } else {
        }
    }
    return PyErr_GivenExceptionMatches(err, exc_type);
}
/* Match err against either of two exception classes in one pass. */
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
    assert(PyExceptionClass_Check(exc_type1));
    assert(PyExceptionClass_Check(exc_type2));
    if (likely(err == exc_type1 || err == exc_type2)) return 1;
    if (likely(PyExceptionClass_Check(err))) {
        return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
    }
    return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
}
#endif
/* ImportFrom */
/* `from module import name`: attribute lookup on the module, converting an
 * AttributeError into the ImportError that Python semantics require.
 * Returns a new reference, or NULL with an exception set. */
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) {
    PyObject* value = __Pyx_PyObject_GetAttrStr(module, name);
    if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) {
        PyErr_Format(PyExc_ImportError,
        #if PY_MAJOR_VERSION < 3
            "cannot import name %.230s", PyString_AS_STRING(name));
        #else
            "cannot import name %S", name);
        #endif
    }
    return value;
}
/* HasAttr */
/* hasattr(o, n) that requires a string name: returns 1/0, or -1 with a
 * TypeError for non-string names. Note that, like the classic hasattr,
 * any exception raised during the lookup is swallowed and reported as
 * "attribute absent". */
static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) {
    PyObject *r;
    if (unlikely(!__Pyx_PyBaseString_Check(n))) {
        PyErr_SetString(PyExc_TypeError,
                        "hasattr(): attribute name must be string");
        return -1;
    }
    r = __Pyx_GetAttr(o, n);
    if (unlikely(!r)) {
        PyErr_Clear();
        return 0;
    } else {
        Py_DECREF(r);
        return 1;
    }
}
/* PyObject_GenericGetAttrNoDict */
/* Fast generic attribute lookup for types WITHOUT an instance __dict__
 * (tp_dictoffset == 0): only the type's MRO needs to be searched.
 * Pre-3.7 CPython only; 3.7+ has this optimization built in. */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
    PyErr_Format(PyExc_AttributeError,
#if PY_MAJOR_VERSION >= 3
                 "'%.50s' object has no attribute '%U'",
                 tp->tp_name, attr_name);
#else
                 "'%.50s' object has no attribute '%.400s'",
                 tp->tp_name, PyString_AS_STRING(attr_name));
#endif
    return NULL;
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
    PyObject *descr;
    PyTypeObject *tp = Py_TYPE(obj);
    if (unlikely(!PyString_Check(attr_name))) {
        return PyObject_GenericGetAttr(obj, attr_name);
    }
    assert(!tp->tp_dictoffset);
    descr = _PyType_Lookup(tp, attr_name);  /* borrowed reference */
    if (unlikely(!descr)) {
        return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
    }
    Py_INCREF(descr);
    #if PY_MAJOR_VERSION < 3
    if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
    #endif
    {
        /* Invoke the descriptor protocol if the found object supports it. */
        descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
        if (unlikely(f)) {
            PyObject *res = f(descr, obj, (PyObject *)tp);
            Py_DECREF(descr);
            return res;
        }
    }
    return descr;
}
#endif
/* PyObject_GenericGetAttr */
/* Dispatch: use the no-dict fast path only when the type really has no
 * instance __dict__, otherwise defer to CPython's generic lookup. */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
    if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
        return PyObject_GenericGetAttr(obj, attr_name);
    }
    return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
}
#endif
/* SetVTable */
/* Store an extension type's C-method vtable pointer in its tp_dict under
 * the key '__pyx_vtable__', wrapped in a capsule (PyCObject pre-2.7).
 * Returns 0 on success, -1 with an exception set on failure. */
static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
#if PY_VERSION_HEX >= 0x02070000
    PyObject *ob = PyCapsule_New(vtable, 0, 0);
#else
    PyObject *ob = PyCObject_FromVoidPtr(vtable, 0);
#endif
    if (!ob)
        goto bad;
    if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0)
        goto bad;
    Py_DECREF(ob);
    return 0;
bad:
    Py_XDECREF(ob);
    return -1;
}
/* SetupReduce */
/* Helper: does `meth.__name__` equal `name`? Errors are swallowed and
 * reported as "no match". */
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
  int ret;
  PyObject *name_attr;
  name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name_2);
  if (likely(name_attr)) {
      ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
  } else {
      ret = -1;
  }
  if (unlikely(ret < 0)) {
      PyErr_Clear();
      ret = 0;
  }
  Py_XDECREF(name_attr);
  return ret;
}
/* Wire up pickling for an extension type: when the type does not define
 * __getstate__ and still inherits object's default __reduce_ex__, move the
 * generated __reduce_cython__/__setstate_cython__ methods into place as
 * __reduce__/__setstate__. Returns 0 on success, -1 on failure.
 * NOTE: with CYTHON_USE_PYTYPE_LOOKUP, object_reduce/object_reduce_ex hold
 * borrowed references (_PyType_Lookup) and are deliberately not decrefed. */
static int __Pyx_setup_reduce(PyObject* type_obj) {
    int ret = 0;
    PyObject *object_reduce = NULL;
    PyObject *object_reduce_ex = NULL;
    PyObject *reduce = NULL;
    PyObject *reduce_ex = NULL;
    PyObject *reduce_cython = NULL;
    PyObject *setstate = NULL;
    PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
    if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
    if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
#if CYTHON_USE_PYTYPE_LOOKUP
    object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
    object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
    reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
    if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
        object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
        object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
        reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
        if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
            reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto __PYX_BAD;
            ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
            ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
            setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
            if (!setstate) PyErr_Clear();
            if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
                setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto __PYX_BAD;
                ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
                ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
            }
            /* tp_dict was mutated directly; invalidate the method cache. */
            PyType_Modified((PyTypeObject*)type_obj);
        }
    }
    goto __PYX_GOOD;
__PYX_BAD:
    if (!PyErr_Occurred())
        PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
    ret = -1;
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
    Py_XDECREF(object_reduce);
    Py_XDECREF(object_reduce_ex);
#endif
    Py_XDECREF(reduce);
    Py_XDECREF(reduce_ex);
    Py_XDECREF(reduce_cython);
    Py_XDECREF(setstate);
    Py_XDECREF(setstate_cython);
    return ret;
}
/* TypeImport */
/* Import `module_name.class_name` and verify it is a type whose instance
 * size is binary-compatible with the `size` this module was compiled
 * against. check_size selects strictness: Error (exact match), Warn
 * (larger emits a warning), or the default minimum-size check only.
 * Returns a new reference to the type, or NULL with an exception set. */
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,
    size_t size, enum __Pyx_ImportType_CheckSize check_size)
{
    PyObject *result = 0;
    char warning[200];
    Py_ssize_t basicsize;
#ifdef Py_LIMITED_API
    PyObject *py_basicsize;
#endif
    result = PyObject_GetAttrString(module, class_name);
    if (!result)
        goto bad;
    if (!PyType_Check(result)) {
        PyErr_Format(PyExc_TypeError,
            "%.200s.%.200s is not a type object",
            module_name, class_name);
        goto bad;
    }
#ifndef Py_LIMITED_API
    basicsize = ((PyTypeObject *)result)->tp_basicsize;
#else
    /* Limited API: tp_basicsize is not accessible; query __basicsize__. */
    py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
    if (!py_basicsize)
        goto bad;
    basicsize = PyLong_AsSsize_t(py_basicsize);
    Py_DECREF(py_basicsize);
    py_basicsize = 0;
    if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())
        goto bad;
#endif
    /* A smaller runtime struct than expected is always an error. */
    if ((size_t)basicsize < size) {
        PyErr_Format(PyExc_ValueError,
            "%.200s.%.200s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        goto bad;
    }
    if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {
        PyErr_Format(PyExc_ValueError,
            "%.200s.%.200s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        goto bad;
    }
    else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {
        PyOS_snprintf(warning, sizeof(warning),
            "%s.%s size changed, may indicate binary incompatibility. "
            "Expected %zd from C header, got %zd from PyObject",
            module_name, class_name, size, basicsize);
        if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;
    }
    return (PyTypeObject *)result;
bad:
    Py_XDECREF(result);
    return NULL;
}
#endif
/* CLineInTraceback */
/* Decide whether the C source line should appear in Python tracebacks,
 * controlled by cython_runtime.cline_in_traceback. Returns c_line to show
 * it, or 0 to suppress it. The pending exception is saved and restored
 * around the attribute lookups. */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
    PyObject *use_cline;
    PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
    PyObject **cython_runtime_dict;
#endif
    if (unlikely(!__pyx_cython_runtime)) {
        return c_line;
    }
    __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
#if CYTHON_COMPILING_IN_CPYTHON
    cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
    if (likely(cython_runtime_dict)) {
      __PYX_PY_DICT_LOOKUP_IF_MODIFIED(
          use_cline, *cython_runtime_dict,
          __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
    } else
#endif
    {
      PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
      if (use_cline_obj) {
        use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
        Py_DECREF(use_cline_obj);
      } else {
        PyErr_Clear();
        use_cline = NULL;
      }
    }
    if (!use_cline) {
        c_line = 0;
        /* First use: default the flag to False so later lookups succeed. */
        PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
    }
    else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
        c_line = 0;
    }
    __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
    return c_line;
}
#endif
/* CodeObjectCache */
/* Sorted-array cache mapping a source line number to the PyCodeObject
 * synthesized for traceback entries, so AddTraceback can reuse them. */
/* Binary search: return the index of code_line in entries, or the
 * insertion point that keeps the array sorted. */
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
    int start = 0, mid = 0, end = count - 1;
    if (end >= 0 && code_line > entries[end].code_line) {
        return count;
    }
    while (start < end) {
        mid = start + (end - start) / 2;
        if (code_line < entries[mid].code_line) {
            end = mid;
        } else if (code_line > entries[mid].code_line) {
             start = mid + 1;
        } else {
            return mid;
        }
    }
    if (code_line <= entries[mid].code_line) {
        return mid;
    } else {
        return mid + 1;
    }
}
/* Look up the cached code object for code_line; returns a new reference
 * or NULL on a cache miss (line 0 is never cached). */
static PyCodeObject *__pyx_find_code_object(int code_line) {
    PyCodeObject* code_object;
    int pos;
    if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
        return NULL;
    }
    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
    if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
        return NULL;
    }
    code_object = __pyx_code_cache.entries[pos].code_object;
    Py_INCREF(code_object);
    return code_object;
}
/* Insert (or replace) the cache entry for code_line, growing the backing
 * array in chunks of 64. Allocation failures silently skip caching. */
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
    int pos, i;
    __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
    if (unlikely(!code_line)) {
        return;
    }
    if (unlikely(!entries)) {
        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
        if (likely(entries)) {
            __pyx_code_cache.entries = entries;
            __pyx_code_cache.max_count = 64;
            __pyx_code_cache.count = 1;
            entries[0].code_line = code_line;
            entries[0].code_object = code_object;
            Py_INCREF(code_object);
        }
        return;
    }
    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
    if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
        /* Same line already cached: swap in the new code object. */
        PyCodeObject* tmp = entries[pos].code_object;
        entries[pos].code_object = code_object;
        Py_DECREF(tmp);
        return;
    }
    if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
        int new_max = __pyx_code_cache.max_count + 64;
        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
            __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
        if (unlikely(!entries)) {
            return;
        }
        __pyx_code_cache.entries = entries;
        __pyx_code_cache.max_count = new_max;
    }
    /* Shift the tail right to open a slot at the insertion point. */
    for (i=__pyx_code_cache.count; i>pos; i--) {
        entries[i] = entries[i-1];
    }
    entries[pos].code_line = code_line;
    entries[pos].code_object = code_object;
    __pyx_code_cache.count++;
    Py_INCREF(code_object);
}
/* AddTraceback */
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
/* Build a minimal PyCodeObject (empty code, no args) carrying just the
 * filename, function name and line number, for synthetic traceback frames.
 * When c_line is nonzero the C file/line is appended to the name. */
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
            const char *funcname, int c_line,
            int py_line, const char *filename) {
    PyCodeObject *py_code = 0;
    PyObject *py_srcfile = 0;
    PyObject *py_funcname = 0;
    #if PY_MAJOR_VERSION < 3
    py_srcfile = PyString_FromString(filename);
    #else
    py_srcfile = PyUnicode_FromString(filename);
    #endif
    if (!py_srcfile) goto bad;
    if (c_line) {
        #if PY_MAJOR_VERSION < 3
        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
        #else
        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
        #endif
    }
    else {
        #if PY_MAJOR_VERSION < 3
        py_funcname = PyString_FromString(funcname);
        #else
        py_funcname = PyUnicode_FromString(funcname);
        #endif
    }
    if (!py_funcname) goto bad;
    py_code = __Pyx_PyCode_New(
        0,
        0,
        0,
        0,
        0,
        __pyx_empty_bytes, /*PyObject *code,*/
        __pyx_empty_tuple, /*PyObject *consts,*/
        __pyx_empty_tuple, /*PyObject *names,*/
        __pyx_empty_tuple, /*PyObject *varnames,*/
        __pyx_empty_tuple, /*PyObject *freevars,*/
        __pyx_empty_tuple, /*PyObject *cellvars,*/
        py_srcfile,   /*PyObject *filename,*/
        py_funcname,  /*PyObject *name,*/
        py_line,
        __pyx_empty_bytes  /*PyObject *lnotab*/
    );
    Py_DECREF(py_srcfile);
    Py_DECREF(py_funcname);
    return py_code;
bad:
    Py_XDECREF(py_srcfile);
    Py_XDECREF(py_funcname);
    return NULL;
}
/* Append a synthetic frame (funcname/filename/py_line, optionally the C
 * line) to the current exception's traceback. Code objects are cached by
 * line number — keyed on -c_line to keep C and Python lines distinct. */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
                               int py_line, const char *filename) {
    PyCodeObject *py_code = 0;
    PyFrameObject *py_frame = 0;
    PyThreadState *tstate = __Pyx_PyThreadState_Current;
    if (c_line) {
        c_line = __Pyx_CLineForTraceback(tstate, c_line);
    }
    py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
    if (!py_code) {
        py_code = __Pyx_CreateCodeObjectForTraceback(
            funcname, c_line, py_line, filename);
        if (!py_code) goto bad;
        __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
    }
    py_frame = PyFrame_New(
        tstate,            /*PyThreadState *tstate,*/
        py_code,           /*PyCodeObject *code,*/
        __pyx_d,    /*PyObject *globals,*/
        0                  /*PyObject *locals*/
    );
    if (!py_frame) goto bad;
    __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
    PyTraceBack_Here(py_frame);
bad:
    Py_XDECREF(py_code);
    Py_XDECREF(py_frame);
}
#if PY_MAJOR_VERSION < 3
/* Python 2 buffer-protocol shims: dispatch manually to the __getbuffer__ /
 * __releasebuffer__ implementations of the types this module knows about
 * (numpy.ndarray and the generated array/memoryview types), since Py2
 * objects may not expose the new buffer interface directly. */
static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) {
    if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags);
        if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags);
        if (__Pyx_TypeCheck(obj, __pyx_array_type)) return __pyx_array_getbuffer(obj, view, flags);
        if (__Pyx_TypeCheck(obj, __pyx_memoryview_type)) return __pyx_memoryview_getbuffer(obj, view, flags);
    PyErr_Format(PyExc_TypeError, "'%.200s' does not have the buffer interface", Py_TYPE(obj)->tp_name);
    return -1;
}
static void __Pyx_ReleaseBuffer(Py_buffer *view) {
    PyObject *obj = view->obj;
    if (!obj) return;
    if (PyObject_CheckBuffer(obj)) {
        PyBuffer_Release(view);
        return;
    }
    if ((0)) {}
        else if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view);
    view->obj = NULL;
    Py_DECREF(obj);
}
#endif
/* MemviewSliceIsContig */
/* Test whether a memoryview slice is contiguous in the given order
 * ('C' = last dimension varies fastest, 'F' = first). Walks dimensions
 * from the fastest-varying one outward, checking that each stride equals
 * the accumulated item block size and no dimension is indirect
 * (suboffset >= 0). Returns 1 if contiguous, 0 otherwise. */
static int
__pyx_memviewslice_is_contig(const __Pyx_memviewslice mvs, char order, int ndim)
{
    int i, index, step, start;
    Py_ssize_t itemsize = mvs.memview->view.itemsize;
    if (order == 'F') {
        step = 1;
        start = 0;
    } else {
        step = -1;
        start = ndim - 1;
    }
    for (i = 0; i < ndim; i++) {
        index = start + step * i;
        if (mvs.suboffsets[index] >= 0 || mvs.strides[index] != itemsize)
            return 0;
        itemsize *= mvs.shape[index];
    }
    return 1;
}
/* OverlappingSlices */
/* Compute the [start, end) byte range spanned by a slice, accounting for
 * negative strides (which extend the range below the data pointer).
 * A zero-extent dimension yields an empty range. */
static void
__pyx_get_array_memory_extents(__Pyx_memviewslice *slice,
                               void **out_start, void **out_end,
                               int ndim, size_t itemsize)
{
    char *start, *end;
    int i;
    start = end = slice->data;
    for (i = 0; i < ndim; i++) {
        Py_ssize_t stride = slice->strides[i];
        Py_ssize_t extent = slice->shape[i];
        if (extent == 0) {
            *out_start = *out_end = start;
            return;
        } else {
            if (stride > 0)
                end += stride * (extent - 1);
            else
                start += stride * (extent - 1);
        }
    }
    *out_start = start;
    *out_end = end + itemsize;
}
/* Do two slices' memory ranges intersect? Used to detect aliasing before
 * copying between slices. */
static int
__pyx_slices_overlap(__Pyx_memviewslice *slice1,
                     __Pyx_memviewslice *slice2,
                     int ndim, size_t itemsize)
{
    void *start1, *end1, *start2, *end2;
    __pyx_get_array_memory_extents(slice1, &start1, &end1, ndim, itemsize);
    __pyx_get_array_memory_extents(slice2, &start2, &end2, ndim, itemsize);
    return (start1 < end2) && (start2 < end1);
}
/* Capsule */
/* Wrap a raw pointer in a PyCapsule (PyCObject pre-2.7) tagged with the
 * given signature string. Returns a new reference, or NULL on failure. */
static CYTHON_INLINE PyObject *
__pyx_capsule_create(void *p, CYTHON_UNUSED const char *sig)
{
    PyObject *cobj;
#if PY_VERSION_HEX >= 0x02070000
    cobj = PyCapsule_New(p, sig, NULL);
#else
    cobj = PyCObject_FromVoidPtr(p, NULL);
#endif
    return cobj;
}
/* IsLittleEndian */
/* Runtime endianness probe: store a known 32-bit pattern and inspect its
 * first byte. On a little-endian machine the least-significant byte
 * (0x04) is stored first. Returns 1 for little-endian, 0 otherwise. */
static CYTHON_INLINE int __Pyx_Is_Little_Endian(void)
{
    union {
        uint32_t whole;
        uint8_t parts[4];
    } probe;
    probe.whole = 0x01020304;
    return probe.parts[0] == 4;
}
/* BufferFormatCheck */
/* Initialize a buffer-format parsing context: the caller-provided `stack`
 * tracks nested struct scopes, with `type` as the root dtype. Packing mode
 * starts as native ('@'). Single-field wrapper structs ('S' typegroup) are
 * unwrapped immediately so parsing starts at the innermost field. */
static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,
                              __Pyx_BufFmt_StackElem* stack,
                              __Pyx_TypeInfo* type) {
  stack[0].field = &ctx->root;
  stack[0].parent_offset = 0;
  ctx->root.type = type;
  ctx->root.name = "buffer dtype";
  ctx->root.offset = 0;
  ctx->head = stack;
  ctx->head->field = &ctx->root;
  ctx->fmt_offset = 0;
  ctx->head->parent_offset = 0;
  ctx->new_packmode = '@';
  ctx->enc_packmode = '@';
  ctx->new_count = 1;
  ctx->enc_count = 0;
  ctx->enc_type = 0;
  ctx->is_complex = 0;
  ctx->is_valid_array = 0;
  ctx->struct_alignment = 0;
  while (type->typegroup == 'S') {
    ++ctx->head;
    ctx->head->field = type->fields;
    ctx->head->parent_offset = 0;
    type = type->fields->type;
  }
}
/* Parse a non-negative decimal count at *ts, advancing the pointer past
 * the digits consumed. Returns the value, or -1 if *ts does not start
 * with a digit (in which case the pointer is left unchanged). */
static int __Pyx_BufFmt_ParseNumber(const char** ts) {
    const char* p = *ts;
    int value;
    if (*p < '0' || *p > '9')
        return -1;
    value = 0;
    do {
        value = value * 10 + (*p - '0');
        ++p;
    } while (*p >= '0' && *p <= '9');
    *ts = p;
    return value;
}
/* Like ParseNumber, but a missing digit is an error: sets ValueError and
 * returns -1. */
static int __Pyx_BufFmt_ExpectNumber(const char **ts) {
    int number = __Pyx_BufFmt_ParseNumber(ts);
    if (number == -1)
        PyErr_Format(PyExc_ValueError,\
                     "Does not understand character buffer dtype format string ('%c')", **ts);
    return number;
}
/* Report an unrecognized struct-format character via ValueError. */
static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) {
  PyErr_Format(PyExc_ValueError,
               "Unexpected format string character: '%c'", ch);
}
/* Human-readable name for a struct-module format character, used in
 * buffer dtype mismatch error messages. */
static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) {
  switch (ch) {
    case '?': return "'bool'";
    case 'c': return "'char'";
    case 'b': return "'signed char'";
    case 'B': return "'unsigned char'";
    case 'h': return "'short'";
    case 'H': return "'unsigned short'";
    case 'i': return "'int'";
    case 'I': return "'unsigned int'";
    case 'l': return "'long'";
    case 'L': return "'unsigned long'";
    case 'q': return "'long long'";
    case 'Q': return "'unsigned long long'";
    case 'f': return (is_complex ? "'complex float'" : "'float'");
    case 'd': return (is_complex ? "'complex double'" : "'double'");
    case 'g': return (is_complex ? "'complex long double'" : "'long double'");
    case 'T': return "a struct";
    case 'O': return "Python object";
    case 'P': return "a pointer";
    case 's': case 'p': return "a string";
    case 0: return "end";
    default: return "unparseable format string";
  }
}
/* Size of a format character in "standard" (fixed, platform-independent)
 * packing mode; complex types are twice the component size. 'g' has no
 * standard size and raises ValueError (returning 0). */
static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) {
  switch (ch) {
    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;
    case 'h': case 'H': return 2;
    case 'i': case 'I': case 'l': case 'L': return 4;
    case 'q': case 'Q': return 8;
    case 'f': return (is_complex ? 8 : 4);
    case 'd': return (is_complex ? 16 : 8);
    case 'g': {
      PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g')..");
      return 0;
    }
    case 'O': case 'P': return sizeof(void*);
    default:
      __Pyx_BufFmt_RaiseUnexpectedChar(ch);
      return 0;
    }
}
/* Size of a format character in native ('@') packing mode, i.e. the
 * compiler's sizeof for the corresponding C type. Returns 0 (with a
 * ValueError set) for unknown characters. */
static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) {
  switch (ch) {
    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;
    case 'h': case 'H': return sizeof(short);
    case 'i': case 'I': return sizeof(int);
    case 'l': case 'L': return sizeof(long);
#ifdef HAVE_LONG_LONG
    case 'q': case 'Q': return sizeof(PY_LONG_LONG);
#endif
    case 'f': return sizeof(float) * (is_complex ? 2 : 1);
    case 'd': return sizeof(double) * (is_complex ? 2 : 1);
    case 'g': return sizeof(long double) * (is_complex ? 2 : 1);
    case 'O': case 'P': return sizeof(void*);
    default: {
      __Pyx_BufFmt_RaiseUnexpectedChar(ch);
      return 0;
    }
  }
}
/* Helper structs for computing each C type's alignment requirement:
   sizeof(__Pyx_st_T) - sizeof(T) yields the padding the compiler inserts
   after a leading char, i.e. the alignment of T. */
typedef struct { char c; short x; } __Pyx_st_short;
typedef struct { char c; int x; } __Pyx_st_int;
typedef struct { char c; long x; } __Pyx_st_long;
typedef struct { char c; float x; } __Pyx_st_float;
typedef struct { char c; double x; } __Pyx_st_double;
typedef struct { char c; long double x; } __Pyx_st_longdouble;
typedef struct { char c; void *x; } __Pyx_st_void_p;
#ifdef HAVE_LONG_LONG
typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong;
#endif
/* Alignment requirement (in bytes) of the C type behind a format
 * character, derived from the __Pyx_st_* probe structs above. Returns 0
 * (with a ValueError set) for unknown characters. */
static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) {
  switch (ch) {
    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;
    case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short);
    case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int);
    case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long);
#ifdef HAVE_LONG_LONG
    case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG);
#endif
    case 'f': return sizeof(__Pyx_st_float) - sizeof(float);
    case 'd': return sizeof(__Pyx_st_double) - sizeof(double);
    case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double);
    case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*);
    default:
      __Pyx_BufFmt_RaiseUnexpectedChar(ch);
      return 0;
    }
}
/* These are for computing the padding at the end of the struct to align
   on the first member of the struct. This will probably be the same as the
   alignment computed above, but we don't have any guarantees. */
typedef struct { short x; char c; } __Pyx_pad_short;
typedef struct { int x; char c; } __Pyx_pad_int;
typedef struct { long x; char c; } __Pyx_pad_long;
typedef struct { float x; char c; } __Pyx_pad_float;
typedef struct { double x; char c; } __Pyx_pad_double;
typedef struct { long double x; char c; } __Pyx_pad_longdouble;
typedef struct { void *x; char c; } __Pyx_pad_void_p;
#ifdef HAVE_LONG_LONG
typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong;
#endif
/* Return the trailing struct padding (in bytes) implied by format char `ch`,
   measured via the __Pyx_pad_* probe structs.  Mirrors
   __Pyx_BufFmt_TypeCharToAlignment but probes end-of-struct padding instead.
   Raises and returns 0 on an unexpected character; `is_complex` is unused. */
static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) {
  if (ch == '?' || ch == 'c' || ch == 'b' || ch == 'B' || ch == 's' || ch == 'p')
    return 1;
  if (ch == 'h' || ch == 'H') return sizeof(__Pyx_pad_short) - sizeof(short);
  if (ch == 'i' || ch == 'I') return sizeof(__Pyx_pad_int) - sizeof(int);
  if (ch == 'l' || ch == 'L') return sizeof(__Pyx_pad_long) - sizeof(long);
#ifdef HAVE_LONG_LONG
  if (ch == 'q' || ch == 'Q') return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG);
#endif
  if (ch == 'f') return sizeof(__Pyx_pad_float) - sizeof(float);
  if (ch == 'd') return sizeof(__Pyx_pad_double) - sizeof(double);
  if (ch == 'g') return sizeof(__Pyx_pad_longdouble) - sizeof(long double);
  if (ch == 'P' || ch == 'O') return sizeof(__Pyx_pad_void_p) - sizeof(void*);
  __Pyx_BufFmt_RaiseUnexpectedChar(ch);
  return 0;
}
/* Classify format char `ch` into a coarse type group used for dtype
   compatibility checks: 'H' = char/bytes-like, 'I' = signed int,
   'U' = unsigned int/bool, 'R'/'C' = real/complex float, 'O' = object,
   'P' = pointer.  Raises and returns 0 for an unknown character. */
static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {
  if (ch == 'c')
    return 'H';
  if (ch == 'b' || ch == 'h' || ch == 'i' ||
      ch == 'l' || ch == 'q' || ch == 's' || ch == 'p')
    return 'I';
  if (ch == '?' || ch == 'B' || ch == 'H' || ch == 'I' || ch == 'L' || ch == 'Q')
    return 'U';
  if (ch == 'f' || ch == 'd' || ch == 'g')
    return is_complex ? 'C' : 'R';
  if (ch == 'O')
    return 'O';
  if (ch == 'P')
    return 'P';
  __Pyx_BufFmt_RaiseUnexpectedChar(ch);
  return 0;
}
/* Raise a ValueError describing a dtype mismatch between the expected
   struct layout (ctx->head, walking ctx->root) and the format character
   actually seen (ctx->enc_type).  Two message shapes: a top-level mismatch
   (or trailing garbage when the walk is already finished, head == NULL),
   and a mismatch inside a nested struct field. */
static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) {
  if (ctx->head == NULL || ctx->head->field == &ctx->root) {
    const char* expected;
    const char* quote;
    if (ctx->head == NULL) {
      /* Whole struct already consumed: we expected the end of the format string. */
      expected = "end";
      quote = "";
    } else {
      expected = ctx->head->field->type->name;
      quote = "'";
    }
    PyErr_Format(PyExc_ValueError,
                 "Buffer dtype mismatch, expected %s%s%s but got %s",
                 quote, expected, quote,
                 __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex));
  } else {
    /* Nested case: report both the offending field and its parent struct. */
    __Pyx_StructField* field = ctx->head->field;
    __Pyx_StructField* parent = (ctx->head - 1)->field;
    PyErr_Format(PyExc_ValueError,
                 "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'",
                 field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex),
                 parent->type->name, field->name);
  }
}
/* Consume the pending "type chunk" accumulated in ctx (enc_type repeated
   enc_count times, under enc_packmode), advancing the expected-struct walk
   (ctx->head / ctx->fmt_offset) and validating size, type group, alignment
   and offsets against it.  Returns 0 on success, -1 with a Python error set
   on mismatch.  NOTE(review): intricate stack-walking state machine; kept
   byte-identical, comments only. */
static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {
  char group;
  size_t size, offset, arraysize = 1;
  if (ctx->enc_type == 0) return 0;
  /* Array field: verify dimension count/sizes recorded by the '(' parser. */
  if (ctx->head->field->type->arraysize[0]) {
    int i, ndim = 0;
    if (ctx->enc_type == 's' || ctx->enc_type == 'p') {
      /* 's'/'p' encode a 1-D char array via their repeat count. */
      ctx->is_valid_array = ctx->head->field->type->ndim == 1;
      ndim = 1;
      if (ctx->enc_count != ctx->head->field->type->arraysize[0]) {
        PyErr_Format(PyExc_ValueError,
                     "Expected a dimension of size %zu, got %zu",
                     ctx->head->field->type->arraysize[0], ctx->enc_count);
        return -1;
      }
    }
    if (!ctx->is_valid_array) {
      PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d",
                   ctx->head->field->type->ndim, ndim);
      return -1;
    }
    for (i = 0; i < ctx->head->field->type->ndim; i++) {
      arraysize *= ctx->head->field->type->arraysize[i];
    }
    ctx->is_valid_array = 0;
    ctx->enc_count = 1;
  }
  group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex);
  do {
    __Pyx_StructField* field = ctx->head->field;
    __Pyx_TypeInfo* type = field->type;
    /* '@'/'^' use native sizes; '='/'<'/'>'/'!' use the standard sizes. */
    if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') {
      size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex);
    } else {
      size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex);
    }
    if (ctx->enc_packmode == '@') {
      /* Native mode also implies natural alignment: round fmt_offset up. */
      size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);
      size_t align_mod_offset;
      if (align_at == 0) return -1;
      align_mod_offset = ctx->fmt_offset % align_at;
      if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset;
      if (ctx->struct_alignment == 0)
          ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type,
                                                                 ctx->is_complex);
    }
    if (type->size != size || type->typegroup != group) {
      if (type->typegroup == 'C' && type->fields != NULL) {
        /* Expected type is a typedef-like complex wrapper: descend into it. */
        size_t parent_offset = ctx->head->parent_offset + field->offset;
        ++ctx->head;
        ctx->head->field = type->fields;
        ctx->head->parent_offset = parent_offset;
        continue;
      }
      if ((type->typegroup == 'H' || group == 'H') && type->size == size) {
        /* char-group vs same-sized type: tolerated. */
      } else {
        __Pyx_BufFmt_RaiseExpected(ctx);
        return -1;
      }
    }
    offset = ctx->head->parent_offset + field->offset;
    if (ctx->fmt_offset != offset) {
      PyErr_Format(PyExc_ValueError,
                   "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected",
                   (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);
      return -1;
    }
    ctx->fmt_offset += size;
    if (arraysize)
      ctx->fmt_offset += (arraysize - 1) * size;
    --ctx->enc_count;
    /* Advance to the next expected field, popping/pushing nested structs. */
    while (1) {
      if (field == &ctx->root) {
        ctx->head = NULL;
        if (ctx->enc_count != 0) {
          __Pyx_BufFmt_RaiseExpected(ctx);
          return -1;
        }
        break;
      }
      ctx->head->field = ++field;
      if (field->type == NULL) {
        /* End of this struct's field list: pop back to the parent. */
        --ctx->head;
        field = ctx->head->field;
        continue;
      } else if (field->type->typegroup == 'S') {
        /* Next field is itself a struct: push and descend. */
        size_t parent_offset = ctx->head->parent_offset + field->offset;
        if (field->type->fields->type == NULL) continue;
        field = field->type->fields;
        ++ctx->head;
        ctx->head->field = field;
        ctx->head->parent_offset = parent_offset;
        break;
      } else {
        break;
      }
    }
  } while (ctx->enc_count);
  ctx->enc_type = 0;
  ctx->is_complex = 0;
  return 0;
}
/* Parse an "(d1,d2,...)" array-dimension suffix of a buffer format string.
   *tsp points at the '('; on success it is advanced past the ')' and the
   (borrowed) Py_None is returned as a truthy success marker.  Each parsed
   dimension is checked against the expected type's arraysize.  Returns
   NULL with a Python error set on any mismatch or malformed input. */
static PyObject *
__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp)
{
    const char *ts = *tsp;
    int i = 0, number;
    int ndim = ctx->head->field->type->ndim;
    ++ts;  /* skip '(' */
    if (ctx->new_count != 1) {
        /* A repeat count before '(' (e.g. "2(2,2)f") is not supported. */
        PyErr_SetString(PyExc_ValueError,
                        "Cannot handle repeated arrays in format string");
        return NULL;
    }
    /* Flush any pending scalar chunk before starting the array element. */
    if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
    while (*ts && *ts != ')') {
        switch (*ts) {
            /* BUG FIX: the original bare `continue` here never advanced
               `ts`, so whitespace inside the parentheses caused an
               infinite loop.  Skip the whitespace character first. */
            case ' ': case '\f': case '\r': case '\n': case '\t': case '\v':
                ++ts;
                continue;
            default:  break;
        }
        number = __Pyx_BufFmt_ExpectNumber(&ts);
        if (number == -1) return NULL;
        if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i])
            return PyErr_Format(PyExc_ValueError,
                        "Expected a dimension of size %zu, got %d",
                        ctx->head->field->type->arraysize[i], number);
        if (*ts != ',' && *ts != ')')
            return PyErr_Format(PyExc_ValueError,
                                "Expected a comma in format string, got '%c'", *ts);
        if (*ts == ',') ts++;
        i++;
    }
    if (i != ndim)
        return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d",
                            ctx->head->field->type->ndim, i);
    if (!*ts) {
        PyErr_SetString(PyExc_ValueError,
                        "Unexpected end of format string, expected ')'");
        return NULL;
    }
    /* Tell ProcessTypeChunk that the following element code carries
       validated array dimensions. */
    ctx->is_valid_array = 1;
    ctx->new_count = 1;
    *tsp = ++ts;  /* skip ')' */
    return Py_None;
}
/* Validate a PEP 3118 buffer format string `ts` against the expected dtype
   described by ctx.  Consumes one top-level item (or a nested 'T{...}'
   struct up to its matching '}') and returns the position after it, or NULL
   with a Python error set on mismatch.  Recursion handles nested structs;
   scalar runs are accumulated into ctx->enc_* and flushed through
   __Pyx_BufFmt_ProcessTypeChunk.  NOTE(review): order-sensitive state
   machine; kept byte-identical, comments only. */
static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) {
  int got_Z = 0;  /* saw a 'Z' complex prefix for the next float code */
  while (1) {
    switch(*ts) {
      case 0:
        /* End of string: everything pending must flush and the expected
           struct walk must be complete. */
        if (ctx->enc_type != 0 && ctx->head == NULL) {
          __Pyx_BufFmt_RaiseExpected(ctx);
          return NULL;
        }
        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
        if (ctx->head != NULL) {
          __Pyx_BufFmt_RaiseExpected(ctx);
          return NULL;
        }
        return ts;
      case ' ':
      case '\r':
      case '\n':
        ++ts;
        break;
      case '<':
        /* Explicit little-endian only accepted on a little-endian build. */
        if (!__Pyx_Is_Little_Endian()) {
          PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler");
          return NULL;
        }
        ctx->new_packmode = '=';
        ++ts;
        break;
      case '>':
      case '!':
        /* Explicit big-endian only accepted on a big-endian build. */
        if (__Pyx_Is_Little_Endian()) {
          PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler");
          return NULL;
        }
        ctx->new_packmode = '=';
        ++ts;
        break;
      case '=':
      case '@':
      case '^':
        ctx->new_packmode = *ts++;
        break;
      case 'T':
        /* Nested struct: 'T{...}', possibly with a repeat count before it. */
        {
          const char* ts_after_sub;
          size_t i, struct_count = ctx->new_count;
          size_t struct_alignment = ctx->struct_alignment;
          ctx->new_count = 1;
          ++ts;
          if (*ts != '{') {
            PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'");
            return NULL;
          }
          if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
          ctx->enc_type = 0;
          ctx->enc_count = 0;
          ctx->struct_alignment = 0;
          ++ts;
          ts_after_sub = ts;
          /* Re-check the same substring once per repetition of the struct. */
          for (i = 0; i != struct_count; ++i) {
            ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts);
            if (!ts_after_sub) return NULL;
          }
          ts = ts_after_sub;
          if (struct_alignment) ctx->struct_alignment = struct_alignment;
        }
        break;
      case '}':
        /* End of nested struct: flush, apply trailing struct padding, return. */
        {
          size_t alignment = ctx->struct_alignment;
          ++ts;
          if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
          ctx->enc_type = 0;
          if (alignment && ctx->fmt_offset % alignment) {
            ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment);
          }
        }
        return ts;
      case 'x':
        /* Pad bytes: advance the running offset by the repeat count. */
        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
        ctx->fmt_offset += ctx->new_count;
        ctx->new_count = 1;
        ctx->enc_count = 0;
        ctx->enc_type = 0;
        ctx->enc_packmode = ctx->new_packmode;
        ++ts;
        break;
      case 'Z':
        /* Complex prefix; must be followed by a float code. */
        got_Z = 1;
        ++ts;
        if (*ts != 'f' && *ts != 'd' && *ts != 'g') {
          __Pyx_BufFmt_RaiseUnexpectedChar('Z');
          return NULL;
        }
        CYTHON_FALLTHROUGH;
      case '?': case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I':
      case 'l': case 'L': case 'q': case 'Q':
      case 'f': case 'd': case 'g':
      case 'O': case 'p':
        /* Same scalar code as the pending chunk: just extend its count. */
        if (ctx->enc_type == *ts && got_Z == ctx->is_complex &&
            ctx->enc_packmode == ctx->new_packmode) {
          ctx->enc_count += ctx->new_count;
          ctx->new_count = 1;
          got_Z = 0;
          ++ts;
          break;
        }
        CYTHON_FALLTHROUGH;
      case 's':
        /* New scalar code ('s' always starts a fresh chunk): flush and begin. */
        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;
        ctx->enc_count = ctx->new_count;
        ctx->enc_packmode = ctx->new_packmode;
        ctx->enc_type = *ts;
        ctx->is_complex = got_Z;
        ++ts;
        ctx->new_count = 1;
        got_Z = 0;
        break;
      case ':':
        /* ':name:' field-name annotation — skipped entirely. */
        ++ts;
        while(*ts != ':') ++ts;
        ++ts;
        break;
      case '(':
        if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL;
        break;
      default:
        /* A digit: repeat count for the next code. */
        {
          int number = __Pyx_BufFmt_ExpectNumber(&ts);
          if (number == -1) return NULL;
          ctx->new_count = (size_t)number;
        }
    }
  }
}
/* TypeInfoCompare */
/* Structural equality of two __Pyx_TypeInfo descriptors: 1 if compatible,
   0 otherwise.  Recurses into struct fields; the 'H' (char) type group is
   compatible with anything of equal size. */
static int
__pyx_typeinfo_cmp(__Pyx_TypeInfo *a, __Pyx_TypeInfo *b)
{
    int i;
    if (!a || !b)
        return 0;
    if (a == b)
        return 1;  /* identical descriptor */
    if (a->size != b->size || a->typegroup != b->typegroup ||
            a->is_unsigned != b->is_unsigned || a->ndim != b->ndim) {
        /* char-group escape hatch: only the size has to match */
        if (a->typegroup == 'H' || b->typegroup == 'H') {
            return a->size == b->size;
        } else {
            return 0;
        }
    }
    if (a->ndim) {
        /* array types: every dimension must agree */
        for (i = 0; i < a->ndim; i++)
            if (a->arraysize[i] != b->arraysize[i])
                return 0;
    }
    if (a->typegroup == 'S') {
        /* struct types: compare flags, then fields pairwise and recursively */
        if (a->flags != b->flags)
            return 0;
        if (a->fields || b->fields) {
            if (!(a->fields && b->fields))
                return 0;
            for (i = 0; a->fields[i].type && b->fields[i].type; i++) {
                __Pyx_StructField *field_a = a->fields + i;
                __Pyx_StructField *field_b = b->fields + i;
                if (field_a->offset != field_b->offset ||
                    !__pyx_typeinfo_cmp(field_a->type, field_b->type))
                    return 0;
            }
            /* both field lists must terminate at the same index */
            return !a->fields[i].type && !b->fields[i].type;
        }
    }
    return 1;
}
/* MemviewSliceValidateAndInit */
/* Check that dimension `dim` of buffer `buf` satisfies the memoryview axis
   spec `spec` (contig/follow/ptr flags).  Axes of extent <= 1 always pass.
   Returns 1 on success, 0 with a ValueError set on failure. */
static int
__pyx_check_strides(Py_buffer *buf, int dim, int ndim, int spec)
{
    if (buf->shape[dim] <= 1)
        return 1;
    if (buf->strides) {
        if (spec & __Pyx_MEMVIEW_CONTIG) {
            if (spec & (__Pyx_MEMVIEW_PTR|__Pyx_MEMVIEW_FULL)) {
                /* indirect axis: stride must step over one pointer */
                if (buf->strides[dim] != sizeof(void *)) {
                    PyErr_Format(PyExc_ValueError,
                                 "Buffer is not indirectly contiguous "
                                 "in dimension %d.", dim);
                    goto fail;
                }
            } else if (buf->strides[dim] != buf->itemsize) {
                /* direct contiguous axis: stride must equal itemsize */
                PyErr_SetString(PyExc_ValueError,
                                "Buffer and memoryview are not contiguous "
                                "in the same dimension.");
                goto fail;
            }
        }
        if (spec & __Pyx_MEMVIEW_FOLLOW) {
            /* followed axis: |stride| must be at least one item */
            Py_ssize_t stride = buf->strides[dim];
            if (stride < 0)
                stride = -stride;
            if (stride < buf->itemsize) {
                PyErr_SetString(PyExc_ValueError,
                                "Buffer and memoryview are not contiguous "
                                "in the same dimension.");
                goto fail;
            }
        }
    } else {
        /* No strides array: the buffer is C-contiguous by convention, so
           only the last axis may be declared contiguous and no axis may
           be indirect. */
        if (spec & __Pyx_MEMVIEW_CONTIG && dim != ndim - 1) {
            PyErr_Format(PyExc_ValueError,
                         "C-contiguous buffer is not contiguous in "
                         "dimension %d", dim);
            goto fail;
        } else if (spec & (__Pyx_MEMVIEW_PTR)) {
            PyErr_Format(PyExc_ValueError,
                         "C-contiguous buffer is not indirect in "
                         "dimension %d", dim);
            goto fail;
        } else if (buf->suboffsets) {
            PyErr_SetString(PyExc_ValueError,
                            "Buffer exposes suboffsets but no strides");
            goto fail;
        }
    }
    return 1;
fail:
    return 0;
}
/* Check dimension `dim` of `buf` against the direct/indirect access
   requirements in `spec`: a DIRECT axis must not carry a suboffset,
   a PTR axis must.  Returns 1 on success, 0 with a ValueError set. */
static int
__pyx_check_suboffsets(Py_buffer *buf, int dim, CYTHON_UNUSED int ndim, int spec)
{
    /* An axis is "indirect" when the exporter supplies a non-negative
       suboffset for it. */
    const int has_suboffset =
        (buf->suboffsets != NULL) && (buf->suboffsets[dim] >= 0);
    if ((spec & __Pyx_MEMVIEW_DIRECT) && has_suboffset) {
        PyErr_Format(PyExc_ValueError,
                     "Buffer not compatible with direct access "
                     "in dimension %d.", dim);
        return 0;
    }
    if ((spec & __Pyx_MEMVIEW_PTR) && !has_suboffset) {
        PyErr_Format(PyExc_ValueError,
                     "Buffer is not indirectly accessible "
                     "in dimension %d.", dim);
        return 0;
    }
    return 1;
}
/* Verify that `buf` is fully contiguous in the order requested by
   `c_or_f_flag` (Fortran: first axis fastest; C: last axis fastest).
   Axes of extent <= 1 are exempt from the stride check.  Returns 1 on
   success, 0 with a ValueError set on failure. */
static int
__pyx_verify_contig(Py_buffer *buf, int ndim, int c_or_f_flag)
{
    int dim;
    if (c_or_f_flag & __Pyx_IS_F_CONTIG) {
        /* Fortran order: expected stride grows from axis 0 upward. */
        Py_ssize_t expected = 1;
        for (dim = 0; dim < ndim; dim++) {
            if (expected * buf->itemsize != buf->strides[dim] &&
                buf->shape[dim] > 1)
            {
                PyErr_SetString(PyExc_ValueError,
                                "Buffer not fortran contiguous.");
                goto fail;
            }
            expected = expected * buf->shape[dim];
        }
    } else if (c_or_f_flag & __Pyx_IS_C_CONTIG) {
        /* C order: walk axes from the last back to the first. */
        Py_ssize_t expected = 1;
        for (dim = ndim - 1; dim >= 0; dim--) {
            if (expected * buf->itemsize != buf->strides[dim] &&
                buf->shape[dim] > 1)
            {
                PyErr_SetString(PyExc_ValueError,
                                "Buffer not C contiguous.");
                goto fail;
            }
            expected = expected * buf->shape[dim];
        }
    }
    return 1;
fail:
    return 0;
}
/* Validate `original_obj` against the requested axis specs, dtype and
   contiguity, and initialise `memviewslice` from it.  Reuses the object
   directly when it is already a memoryview with a matching typeinfo,
   otherwise wraps it in a new memoryview (validating its format string).
   Returns 0 on success, -1 with a Python error set on failure. */
static int __Pyx_ValidateAndInit_memviewslice(
                int *axes_specs,
                int c_or_f_flag,
                int buf_flags,
                int ndim,
                __Pyx_TypeInfo *dtype,
                __Pyx_BufFmt_StackElem stack[],
                __Pyx_memviewslice *memviewslice,
                PyObject *original_obj)
{
    struct __pyx_memoryview_obj *memview, *new_memview;
    __Pyx_RefNannyDeclarations
    Py_buffer *buf;
    int i, spec = 0, retval = -1;
    __Pyx_BufFmt_Context ctx;
    int from_memoryview = __pyx_memoryview_check(original_obj);
    __Pyx_RefNannySetupContext("ValidateAndInit_memviewslice", 0);
    if (from_memoryview && __pyx_typeinfo_cmp(dtype, ((struct __pyx_memoryview_obj *)
                                                    original_obj)->typeinfo)) {
        /* Already a memoryview with the right dtype: reuse without re-wrapping. */
        memview = (struct __pyx_memoryview_obj *) original_obj;
        new_memview = NULL;
    } else {
        memview = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                            original_obj, buf_flags, 0, dtype);
        new_memview = memview;  /* owned reference, released on failure */
        if (unlikely(!memview))
            goto fail;
    }
    buf = &memview->view;
    if (buf->ndim != ndim) {
        PyErr_Format(PyExc_ValueError,
                "Buffer has wrong number of dimensions (expected %d, got %d)",
                ndim, buf->ndim);
        goto fail;
    }
    if (new_memview) {
        /* Freshly wrapped buffer: validate its PEP 3118 format string. */
        __Pyx_BufFmt_Init(&ctx, stack, dtype);
        if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail;
    }
    if ((unsigned) buf->itemsize != dtype->size) {
        PyErr_Format(PyExc_ValueError,
                     "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "u byte%s) "
                     "does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "u byte%s)",
                     buf->itemsize,
                     (buf->itemsize > 1) ? "s" : "",
                     dtype->name,
                     dtype->size,
                     (dtype->size > 1) ? "s" : "");
        goto fail;
    }
    /* Per-axis stride and suboffset validation. */
    for (i = 0; i < ndim; i++) {
        spec = axes_specs[i];
        if (!__pyx_check_strides(buf, i, ndim, spec))
            goto fail;
        if (!__pyx_check_suboffsets(buf, i, ndim, spec))
            goto fail;
    }
    /* Whole-buffer contiguity check (only possible when strides exist). */
    if (buf->strides && !__pyx_verify_contig(buf, ndim, c_or_f_flag))
        goto fail;
    if (unlikely(__Pyx_init_memviewslice(memview, ndim, memviewslice,
                                         new_memview != NULL) == -1)) {
        goto fail;
    }
    retval = 0;
    goto no_fail;
fail:
    Py_XDECREF(new_memview);
    retval = -1;
no_fail:
    __Pyx_RefNannyFinishContext();
    return retval;
}
/* ObjectToMemviewSlice */
/* Convert `obj` into a 1-D direct/strided float memoryview slice
   ("float[:]").  Py_None maps to a None slice; on conversion failure the
   returned slice has NULL memview/data and a Python error is set. */
static CYTHON_INLINE __Pyx_memviewslice __Pyx_PyObject_to_MemoryviewSlice_ds_float(PyObject *obj, int writable_flag) {
    __Pyx_memviewslice result = { 0, 0, { 0 }, { 0 }, { 0 } };
    __Pyx_BufFmt_StackElem stack[1];
    /* single axis: direct access, any stride */
    int axes_specs[] = { (__Pyx_MEMVIEW_DIRECT | __Pyx_MEMVIEW_STRIDED) };
    int retcode;
    if (obj == Py_None) {
        /* None is a valid value for a memoryview argument. */
        result.memview = (struct __pyx_memoryview_obj *) Py_None;
        return result;
    }
    retcode = __Pyx_ValidateAndInit_memviewslice(axes_specs, 0,
                                                 PyBUF_RECORDS_RO | writable_flag, 1,
                                                 &__Pyx_TypeInfo_float, stack,
                                                 &result, obj);
    if (unlikely(retcode == -1))
        goto __pyx_fail;
    return result;
__pyx_fail:
    result.memview = NULL;
    result.data = NULL;
    return result;
}
/* CIntFromPyVerify */
/* Overflow-checked narrowing: evaluate `func_value` as `func_type` and
   return it as `target_type`, jumping to the caller-provided
   raise_overflow / raise_neg_overflow labels when the value does not
   round-trip.  The _EXC variant additionally propagates a pending Python
   error signalled by a -1 return.  No comments inside the macros: every
   body line ends in a '\' continuation. */
#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
    {\
        func_type value = func_value;\
        if (sizeof(target_type) < sizeof(func_type)) {\
            if (unlikely(value != (func_type) (target_type) value)) {\
                func_type zero = 0;\
                if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
                    return (target_type) -1;\
                if (is_unsigned && unlikely(value < zero))\
                    goto raise_neg_overflow;\
                else\
                    goto raise_overflow;\
            }\
        }\
        return (target_type) value;\
    }
/* CIntToPy */
/* Convert a C int to a Python integer, picking the smallest Python C-API
   constructor whose range is known (at compile time) to cover `int`;
   falls back to a raw little/big-endian byte-array conversion. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {
    /* Signedness probe: (T)0 - (T)1 wraps to a huge value iff T is unsigned. */
    const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        if (sizeof(int) < sizeof(long)) {
            return PyInt_FromLong((long) value);
        } else if (sizeof(int) <= sizeof(unsigned long)) {
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
        }
    } else {
        if (sizeof(int) <= sizeof(long)) {
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
        }
    }
    {
        /* Exotic fallback: hand the raw bytes to _PyLong_FromByteArray. */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(int),
                                     little, !is_unsigned);
    }
}
/* MemviewDtypeToObject */
/* Box the float stored at `itemp` as a Python float object. */
static CYTHON_INLINE PyObject *__pyx_memview_get_float(const char *itemp) {
    const float item = *(float *) itemp;
    return (PyObject *) PyFloat_FromDouble(item);
}
/* Unbox `obj` into the float slot at `itemp`.
   Returns 1 on success, 0 when the conversion raised. */
static CYTHON_INLINE int __pyx_memview_set_float(const char *itemp, PyObject *obj) {
    float item = __pyx_PyFloat_AsFloat(obj);
    /* -1.0 is the conversion-failure sentinel; confirm via PyErr_Occurred. */
    if ((item == (float)-1) && PyErr_Occurred())
        return 0;
    *(float *) itemp = item;
    return 1;
}
/* CIntToPy */
/* Convert a C long to a Python integer.  Same template as
   __Pyx_PyInt_From_int instantiated for `long`; some branches (e.g.
   sizeof(long) < sizeof(long)) are always false — artifacts of the
   generic code-generation template, removed by the compiler. */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
    /* Signedness probe: wraps to a huge value iff the type is unsigned. */
    const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        if (sizeof(long) < sizeof(long)) {
            return PyInt_FromLong((long) value);
        } else if (sizeof(long) <= sizeof(unsigned long)) {
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
        }
    } else {
        if (sizeof(long) <= sizeof(long)) {
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
        }
    }
    {
        /* Exotic fallback: hand the raw bytes to _PyLong_FromByteArray. */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(long),
                                     little, !is_unsigned);
    }
}
/* Declarations */
/* Build a float complex from real/imaginary parts.  Three variants chosen
   at preprocessor time: C++ std::complex, C99 _Complex, or the fallback
   plain struct with .real/.imag members. */
#if CYTHON_CCOMPLEX
  #ifdef __cplusplus
    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {
      return ::std::complex< float >(x, y);
    }
  #else
    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {
      return x + y*(__pyx_t_float_complex)_Complex_I;
    }
  #endif
#else
    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {
      __pyx_t_float_complex z;
      z.real = x;
      z.imag = y;
      return z;
    }
#endif
/* Arithmetic */
/* Software float-complex arithmetic, compiled only when the compiler has
   no native complex support (#else of CYTHON_CCOMPLEX).  Kept
   byte-identical: float rounding depends on the exact operation order. */
#if CYTHON_CCOMPLEX
#else
    static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
       return (a.real == b.real) && (a.imag == b.imag);
    }
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        __pyx_t_float_complex z;
        z.real = a.real + b.real;
        z.imag = a.imag + b.imag;
        return z;
    }
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        __pyx_t_float_complex z;
        z.real = a.real - b.real;
        z.imag = a.imag - b.imag;
        return z;
    }
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        __pyx_t_float_complex z;
        z.real = a.real * b.real - a.imag * b.imag;
        z.imag = a.real * b.imag + a.imag * b.real;
        return z;
    }
    #if 1
    /* Division via Smith's algorithm: scale by the larger component of the
       divisor to avoid intermediate overflow/underflow. */
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        if (b.imag == 0) {
            return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real);
        } else if (fabsf(b.real) >= fabsf(b.imag)) {
            if (b.real == 0 && b.imag == 0) {
                /* 0/0: deliberately produce NaN/Inf per IEEE semantics */
                return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.imag);
            } else {
                float r = b.imag / b.real;
                float s = (float)(1.0) / (b.real + b.imag * r);
                return __pyx_t_float_complex_from_parts(
                    (a.real + a.imag * r) * s, (a.imag - a.real * r) * s);
            }
        } else {
            float r = b.real / b.imag;
            float s = (float)(1.0) / (b.imag + b.real * r);
            return __pyx_t_float_complex_from_parts(
                (a.real * r + a.imag) * s, (a.imag * r - a.real) * s);
        }
    }
    #else
    /* Textbook division (disabled variant, kept by the code generator). */
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        if (b.imag == 0) {
            return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real);
        } else {
            float denom = b.real * b.real + b.imag * b.imag;
            return __pyx_t_float_complex_from_parts(
                (a.real * b.real + a.imag * b.imag) / denom,
                (a.imag * b.real - a.real * b.imag) / denom);
        }
    }
    #endif
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) {
        __pyx_t_float_complex z;
        z.real = -a.real;
        z.imag = -a.imag;
        return z;
    }
    static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) {
       return (a.real == 0) && (a.imag == 0);
    }
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) {
        __pyx_t_float_complex z;
        z.real =  a.real;
        z.imag = -a.imag;
        return z;
    }
    #if 1
    static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) {
      #if !defined(HAVE_HYPOT) || defined(_MSC_VER)
        return sqrtf(z.real*z.real + z.imag*z.imag);
      #else
        return hypotf(z.real, z.imag);
      #endif
    }
    /* Complex power.  Small non-negative integer exponents use repeated
       multiplication; otherwise polar form: a^b = exp(b * log a). */
    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {
        __pyx_t_float_complex z;
        float r, lnr, theta, z_r, z_theta;
        if (b.imag == 0 && b.real == (int)b.real) {
            if (b.real < 0) {
                /* negative integer power: invert a, negate the exponent */
                float denom = a.real * a.real + a.imag * a.imag;
                a.real = a.real / denom;
                a.imag = -a.imag / denom;
                b.real = -b.real;
            }
            switch ((int)b.real) {
                case 0:
                    z.real = 1;
                    z.imag = 0;
                    return z;
                case 1:
                    return a;
                case 2:
                    return __Pyx_c_prod_float(a, a);
                case 3:
                    z = __Pyx_c_prod_float(a, a);
                    return __Pyx_c_prod_float(z, a);
                case 4:
                    z = __Pyx_c_prod_float(a, a);
                    return __Pyx_c_prod_float(z, z);
            }
        }
        if (a.imag == 0) {
            if (a.real == 0) {
                return a;
            } else if (b.imag == 0) {
                z.real = powf(a.real, b.real);
                z.imag = 0;
                return z;
            } else if (a.real > 0) {
                r = a.real;
                theta = 0;
            } else {
                r = -a.real;
                theta = atan2f(0.0, -1.0);  /* pi */
            }
        } else {
            r = __Pyx_c_abs_float(a);
            theta = atan2f(a.imag, a.real);
        }
        lnr = logf(r);
        z_r = expf(lnr * b.real - theta * b.imag);
        z_theta = theta * b.real + lnr * b.imag;
        z.real = z_r * cosf(z_theta);
        z.imag = z_r * sinf(z_theta);
        return z;
    }
    #endif
#endif
/* Declarations */
/* Build a double complex from real/imaginary parts; same three-variant
   scheme as the float version above. */
#if CYTHON_CCOMPLEX
  #ifdef __cplusplus
    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {
      return ::std::complex< double >(x, y);
    }
  #else
    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {
      return x + y*(__pyx_t_double_complex)_Complex_I;
    }
  #endif
#else
    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {
      __pyx_t_double_complex z;
      z.real = x;
      z.imag = y;
      return z;
    }
#endif
/* Arithmetic */
/* Software double-complex arithmetic, compiled only without native complex
   support.  Mirrors the float family above with double-precision libm
   calls.  Kept byte-identical: rounding depends on operation order. */
#if CYTHON_CCOMPLEX
#else
    static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
       return (a.real == b.real) && (a.imag == b.imag);
    }
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        __pyx_t_double_complex z;
        z.real = a.real + b.real;
        z.imag = a.imag + b.imag;
        return z;
    }
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        __pyx_t_double_complex z;
        z.real = a.real - b.real;
        z.imag = a.imag - b.imag;
        return z;
    }
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        __pyx_t_double_complex z;
        z.real = a.real * b.real - a.imag * b.imag;
        z.imag = a.real * b.imag + a.imag * b.real;
        return z;
    }
    #if 1
    /* Division via Smith's algorithm: scale by the larger component of the
       divisor to avoid intermediate overflow/underflow. */
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        if (b.imag == 0) {
            return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real);
        } else if (fabs(b.real) >= fabs(b.imag)) {
            if (b.real == 0 && b.imag == 0) {
                /* 0/0: deliberately produce NaN/Inf per IEEE semantics */
                return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag);
            } else {
                double r = b.imag / b.real;
                double s = (double)(1.0) / (b.real + b.imag * r);
                return __pyx_t_double_complex_from_parts(
                    (a.real + a.imag * r) * s, (a.imag - a.real * r) * s);
            }
        } else {
            double r = b.real / b.imag;
            double s = (double)(1.0) / (b.imag + b.real * r);
            return __pyx_t_double_complex_from_parts(
                (a.real * r + a.imag) * s, (a.imag * r - a.real) * s);
        }
    }
    #else
    /* Textbook division (disabled variant, kept by the code generator). */
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        if (b.imag == 0) {
            return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real);
        } else {
            double denom = b.real * b.real + b.imag * b.imag;
            return __pyx_t_double_complex_from_parts(
                (a.real * b.real + a.imag * b.imag) / denom,
                (a.imag * b.real - a.real * b.imag) / denom);
        }
    }
    #endif
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) {
        __pyx_t_double_complex z;
        z.real = -a.real;
        z.imag = -a.imag;
        return z;
    }
    static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) {
       return (a.real == 0) && (a.imag == 0);
    }
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) {
        __pyx_t_double_complex z;
        z.real =  a.real;
        z.imag = -a.imag;
        return z;
    }
    #if 1
    static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) {
      #if !defined(HAVE_HYPOT) || defined(_MSC_VER)
        return sqrt(z.real*z.real + z.imag*z.imag);
      #else
        return hypot(z.real, z.imag);
      #endif
    }
    /* Complex power.  Small non-negative integer exponents use repeated
       multiplication; otherwise polar form: a^b = exp(b * log a). */
    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {
        __pyx_t_double_complex z;
        double r, lnr, theta, z_r, z_theta;
        if (b.imag == 0 && b.real == (int)b.real) {
            if (b.real < 0) {
                /* negative integer power: invert a, negate the exponent */
                double denom = a.real * a.real + a.imag * a.imag;
                a.real = a.real / denom;
                a.imag = -a.imag / denom;
                b.real = -b.real;
            }
            switch ((int)b.real) {
                case 0:
                    z.real = 1;
                    z.imag = 0;
                    return z;
                case 1:
                    return a;
                case 2:
                    return __Pyx_c_prod_double(a, a);
                case 3:
                    z = __Pyx_c_prod_double(a, a);
                    return __Pyx_c_prod_double(z, a);
                case 4:
                    z = __Pyx_c_prod_double(a, a);
                    return __Pyx_c_prod_double(z, z);
            }
        }
        if (a.imag == 0) {
            if (a.real == 0) {
                return a;
            } else if (b.imag == 0) {
                z.real = pow(a.real, b.real);
                z.imag = 0;
                return z;
            } else if (a.real > 0) {
                r = a.real;
                theta = 0;
            } else {
                r = -a.real;
                theta = atan2(0.0, -1.0);  /* pi */
            }
        } else {
            r = __Pyx_c_abs_double(a);
            theta = atan2(a.imag, a.real);
        }
        lnr = log(r);
        z_r = exp(lnr * b.real - theta * b.imag);
        z_theta = theta * b.real + lnr * b.imag;
        z.real = z_r * cos(z_theta);
        z.imag = z_r * sin(z_theta);
        return z;
    }
    #endif
#endif
/* CIntToPy */
/* Convert a NumPy NPY_TYPES enum value to a Python integer.  Same generic
   template as __Pyx_PyInt_From_int, instantiated for the enum type (whose
   signedness and size are implementation-defined, hence the probes). */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) {
    /* Signedness probe: wraps to a huge value iff the enum type is unsigned. */
    const enum NPY_TYPES neg_one = (enum NPY_TYPES) ((enum NPY_TYPES) 0 - (enum NPY_TYPES) 1), const_zero = (enum NPY_TYPES) 0;
    const int is_unsigned = neg_one > const_zero;
    if (is_unsigned) {
        if (sizeof(enum NPY_TYPES) < sizeof(long)) {
            return PyInt_FromLong((long) value);
        } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) {
            return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) {
            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
        }
    } else {
        if (sizeof(enum NPY_TYPES) <= sizeof(long)) {
            return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
        } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) {
            return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
        }
    }
    {
        /* Exotic fallback: hand the raw bytes to _PyLong_FromByteArray. */
        int one = 1; int little = (int)*(unsigned char *)&one;
        unsigned char *bytes = (unsigned char *)&value;
        return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES),
                                     little, !is_unsigned);
    }
}
/* MemviewSliceCopyTemplate */
/* Allocate a new contiguous array ('c' or 'fortran' per `mode`) with the
   shape of `from_mvs` and copy its contents into it, returning the new
   slice.  Refuses slices with indirect (suboffset) dimensions.  On failure
   a Python error is set and the returned slice has NULL memview/data. */
static __Pyx_memviewslice
__pyx_memoryview_copy_new_contig(const __Pyx_memviewslice *from_mvs,
                                 const char *mode, int ndim,
                                 size_t sizeof_dtype, int contig_flag,
                                 int dtype_is_object)
{
    __Pyx_RefNannyDeclarations
    int i;
    __Pyx_memviewslice new_mvs = { 0, 0, { 0 }, { 0 }, { 0 } };
    struct __pyx_memoryview_obj *from_memview = from_mvs->memview;
    Py_buffer *buf = &from_memview->view;
    PyObject *shape_tuple = NULL;
    PyObject *temp_int = NULL;
    struct __pyx_array_obj *array_obj = NULL;
    struct __pyx_memoryview_obj *memview_obj = NULL;
    __Pyx_RefNannySetupContext("__pyx_memoryview_copy_new_contig", 0);
    /* Indirect dimensions cannot be copied with a flat memcpy-style copy. */
    for (i = 0; i < ndim; i++) {
        if (from_mvs->suboffsets[i] >= 0) {
            PyErr_Format(PyExc_ValueError, "Cannot copy memoryview slice with "
                                           "indirect dimensions (axis %d)", i);
            goto fail;
        }
    }
    /* Build the Python shape tuple expected by __pyx_array_new. */
    shape_tuple = PyTuple_New(ndim);
    if (unlikely(!shape_tuple)) {
        goto fail;
    }
    __Pyx_GOTREF(shape_tuple);
    for(i = 0; i < ndim; i++) {
        temp_int = PyInt_FromSsize_t(from_mvs->shape[i]);
        if(unlikely(!temp_int)) {
            goto fail;
        } else {
            /* PyTuple_SET_ITEM steals the reference; clear our pointer so
               the fail path does not double-decref it. */
            PyTuple_SET_ITEM(shape_tuple, i, temp_int);
            temp_int = NULL;
        }
    }
    array_obj = __pyx_array_new(shape_tuple, sizeof_dtype, buf->format, (char *) mode, NULL);
    if (unlikely(!array_obj)) {
        goto fail;
    }
    __Pyx_GOTREF(array_obj);
    memview_obj = (struct __pyx_memoryview_obj *) __pyx_memoryview_new(
                                    (PyObject *) array_obj, contig_flag,
                                    dtype_is_object,
                                    from_mvs->memview->typeinfo);
    if (unlikely(!memview_obj))
        goto fail;
    if (unlikely(__Pyx_init_memviewslice(memview_obj, ndim, &new_mvs, 1) < 0))
        goto fail;
    if (unlikely(__pyx_memoryview_copy_contents(*from_mvs, new_mvs, ndim, ndim,
                                                dtype_is_object) < 0))
        goto fail;
    goto no_fail;
fail:
    /* Zero out the result so callers see an unambiguous failure value. */
    __Pyx_XDECREF(new_mvs.memview);
    new_mvs.memview = NULL;
    new_mvs.data = NULL;
no_fail:
    __Pyx_XDECREF(shape_tuple);
    __Pyx_XDECREF(temp_int);
    __Pyx_XDECREF(array_obj);
    __Pyx_RefNannyFinishContext();
    return new_mvs;
}
/* CIntFromPy */
/* Convert a Python object to a C int.
 *
 * On success returns the converted value; on failure sets a Python
 * exception (OverflowError for out-of-range values, a TypeError from the
 * coercion helpers for non-numbers) and returns (int) -1.  Callers must
 * use PyErr_Occurred() to distinguish the error return from a genuine -1.
 * NOTE: Cython-generated template code -- edit the generator, not this file. */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
    /* Detect at compile time whether the target C type is unsigned. */
    const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
    const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
    /* Python 2 fast path: a PyInt already holds a C long. */
    if (likely(PyInt_Check(x))) {
        if (sizeof(int) < sizeof(long)) {
            __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
        } else {
            long val = PyInt_AS_LONG(x);
            if (is_unsigned && unlikely(val < 0)) {
                goto raise_neg_overflow;
            }
            return (int) val;
        }
    } else
#endif
    if (likely(PyLong_Check(x))) {
        if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Fast path: read up to 4 internal PyLong digits directly. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (int) 0;
                case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
                case 2:
                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
                            return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
                            return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
                            return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
                        }
                    }
                    break;
            }
#endif
            /* Target type is unsigned, so any negative PyLong overflows. */
#if CYTHON_COMPILING_IN_CPYTHON
            if (unlikely(Py_SIZE(x) < 0)) {
                goto raise_neg_overflow;
            }
#else
            {
                /* No access to Py_SIZE internals: compare against False (== 0). */
                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
                if (unlikely(result < 0))
                    return (int) -1;
                if (unlikely(result == 1))
                    goto raise_neg_overflow;
            }
#endif
            if (sizeof(int) <= sizeof(unsigned long)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
            }
        } else {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Signed fast path: handle small positive and negative PyLongs. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (int) 0;
                case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
                case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
                case -2:
                    if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 2:
                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                            return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case -3:
                    if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                            return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case -4:
                    if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
                            return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
                            return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
                        }
                    }
                    break;
            }
#endif
            if (sizeof(int) <= sizeof(long)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
            }
        }
        {
            /* Slow path for values wider than (unsigned) long (long):
             * copy the raw bytes via _PyLong_AsByteArray. */
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
            PyErr_SetString(PyExc_RuntimeError,
                            "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
            int val;
            PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
            if (likely(v) && !PyLong_Check(v)) {
                PyObject *tmp = v;
                v = PyNumber_Long(tmp);
                Py_DECREF(tmp);
            }
#endif
            if (likely(v)) {
                /* Detect host endianness at runtime. */
                int one = 1; int is_little = (int)*(unsigned char *)&one;
                unsigned char *bytes = (unsigned char *)&val;
                int ret = _PyLong_AsByteArray((PyLongObject *)v,
                                              bytes, sizeof(val),
                                              is_little, !is_unsigned);
                Py_DECREF(v);
                if (likely(!ret))
                    return val;
            }
#endif
            return (int) -1;
        }
    } else {
        /* Not an int/long: coerce via __int__/__index__ and recurse once. */
        int val;
        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
        if (!tmp) return (int) -1;
        val = __Pyx_PyInt_As_int(tmp);
        Py_DECREF(tmp);
        return val;
    }
raise_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "value too large to convert to int");
    return (int) -1;
raise_neg_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "can't convert negative value to int");
    return (int) -1;
}
/* CIntFromPy */
/* Convert a Python object to a C long.  Same generated template as
 * __Pyx_PyInt_As_int above, instantiated for `long`; the tautological
 * sizeof(long) comparisons below come from that template and are folded
 * away by the compiler.  Returns (long) -1 with an exception set on
 * failure (check PyErr_Occurred() to disambiguate). */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
    /* Detect at compile time whether the target C type is unsigned. */
    const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
    const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
    /* Python 2 fast path: a PyInt already holds a C long. */
    if (likely(PyInt_Check(x))) {
        if (sizeof(long) < sizeof(long)) {
            __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
        } else {
            long val = PyInt_AS_LONG(x);
            if (is_unsigned && unlikely(val < 0)) {
                goto raise_neg_overflow;
            }
            return (long) val;
        }
    } else
#endif
    if (likely(PyLong_Check(x))) {
        if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Fast path: read up to 4 internal PyLong digits directly. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (long) 0;
                case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
                case 2:
                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
                            return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
                            return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
                            return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
                        }
                    }
                    break;
            }
#endif
            /* Target type is unsigned, so any negative PyLong overflows. */
#if CYTHON_COMPILING_IN_CPYTHON
            if (unlikely(Py_SIZE(x) < 0)) {
                goto raise_neg_overflow;
            }
#else
            {
                /* No access to Py_SIZE internals: compare against False (== 0). */
                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
                if (unlikely(result < 0))
                    return (long) -1;
                if (unlikely(result == 1))
                    goto raise_neg_overflow;
            }
#endif
            if (sizeof(long) <= sizeof(unsigned long)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
            }
        } else {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Signed fast path: handle small positive and negative PyLongs. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (long) 0;
                case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
                case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
                case -2:
                    if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 2:
                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                            return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case -3:
                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                            return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case -4:
                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                            return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
                            return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
                        }
                    }
                    break;
            }
#endif
            if (sizeof(long) <= sizeof(long)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
            }
        }
        {
            /* Slow path for very wide values: copy raw bytes via _PyLong_AsByteArray. */
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
            PyErr_SetString(PyExc_RuntimeError,
                            "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
            long val;
            PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
            if (likely(v) && !PyLong_Check(v)) {
                PyObject *tmp = v;
                v = PyNumber_Long(tmp);
                Py_DECREF(tmp);
            }
#endif
            if (likely(v)) {
                /* Detect host endianness at runtime. */
                int one = 1; int is_little = (int)*(unsigned char *)&one;
                unsigned char *bytes = (unsigned char *)&val;
                int ret = _PyLong_AsByteArray((PyLongObject *)v,
                                              bytes, sizeof(val),
                                              is_little, !is_unsigned);
                Py_DECREF(v);
                if (likely(!ret))
                    return val;
            }
#endif
            return (long) -1;
        }
    } else {
        /* Not an int/long: coerce via __int__/__index__ and recurse once. */
        long val;
        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
        if (!tmp) return (long) -1;
        val = __Pyx_PyInt_As_long(tmp);
        Py_DECREF(tmp);
        return val;
    }
raise_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "value too large to convert to long");
    return (long) -1;
raise_neg_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "can't convert negative value to long");
    return (long) -1;
}
/* CIntFromPy */
/* Convert a Python object to a C char.  Same generated template as
 * __Pyx_PyInt_As_int above, instantiated for `char`.  Whether `char` is
 * signed is platform-dependent; `is_unsigned` below resolves that at
 * compile time.  Returns (char) -1 with an exception set on failure
 * (check PyErr_Occurred() to disambiguate). */
static CYTHON_INLINE char __Pyx_PyInt_As_char(PyObject *x) {
    /* Detect at compile time whether the target C type is unsigned. */
    const char neg_one = (char) ((char) 0 - (char) 1), const_zero = (char) 0;
    const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
    /* Python 2 fast path: a PyInt already holds a C long. */
    if (likely(PyInt_Check(x))) {
        if (sizeof(char) < sizeof(long)) {
            __PYX_VERIFY_RETURN_INT(char, long, PyInt_AS_LONG(x))
        } else {
            long val = PyInt_AS_LONG(x);
            if (is_unsigned && unlikely(val < 0)) {
                goto raise_neg_overflow;
            }
            return (char) val;
        }
    } else
#endif
    if (likely(PyLong_Check(x))) {
        if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Fast path: read up to 4 internal PyLong digits directly. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (char) 0;
                case 1: __PYX_VERIFY_RETURN_INT(char, digit, digits[0])
                case 2:
                    if (8 * sizeof(char) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) >= 2 * PyLong_SHIFT) {
                            return (char) (((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0]));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(char) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) >= 3 * PyLong_SHIFT) {
                            return (char) (((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(char) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) >= 4 * PyLong_SHIFT) {
                            return (char) (((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0]));
                        }
                    }
                    break;
            }
#endif
            /* Target type is unsigned, so any negative PyLong overflows. */
#if CYTHON_COMPILING_IN_CPYTHON
            if (unlikely(Py_SIZE(x) < 0)) {
                goto raise_neg_overflow;
            }
#else
            {
                /* No access to Py_SIZE internals: compare against False (== 0). */
                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
                if (unlikely(result < 0))
                    return (char) -1;
                if (unlikely(result == 1))
                    goto raise_neg_overflow;
            }
#endif
            if (sizeof(char) <= sizeof(unsigned long)) {
                __PYX_VERIFY_RETURN_INT_EXC(char, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(char) <= sizeof(unsigned PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(char, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
            }
        } else {
#if CYTHON_USE_PYLONG_INTERNALS
            /* Signed fast path: handle small positive and negative PyLongs. */
            const digit* digits = ((PyLongObject*)x)->ob_digit;
            switch (Py_SIZE(x)) {
                case 0: return (char) 0;
                case -1: __PYX_VERIFY_RETURN_INT(char, sdigit, (sdigit) (-(sdigit)digits[0]))
                case 1: __PYX_VERIFY_RETURN_INT(char, digit, +digits[0])
                case -2:
                    if (8 * sizeof(char) - 1 > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 2 * PyLong_SHIFT) {
                            return (char) (((char)-1)*(((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
                case 2:
                    if (8 * sizeof(char) > 1 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 2 * PyLong_SHIFT) {
                            return (char) ((((((char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
                case -3:
                    if (8 * sizeof(char) - 1 > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) {
                            return (char) (((char)-1)*(((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
                case 3:
                    if (8 * sizeof(char) > 2 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) {
                            return (char) ((((((((char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
                case -4:
                    if (8 * sizeof(char) - 1 > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 4 * PyLong_SHIFT) {
                            return (char) (((char)-1)*(((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
                case 4:
                    if (8 * sizeof(char) > 3 * PyLong_SHIFT) {
                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
                            __PYX_VERIFY_RETURN_INT(char, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
                        } else if (8 * sizeof(char) - 1 > 4 * PyLong_SHIFT) {
                            return (char) ((((((((((char)digits[3]) << PyLong_SHIFT) | (char)digits[2]) << PyLong_SHIFT) | (char)digits[1]) << PyLong_SHIFT) | (char)digits[0])));
                        }
                    }
                    break;
            }
#endif
            if (sizeof(char) <= sizeof(long)) {
                __PYX_VERIFY_RETURN_INT_EXC(char, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
            } else if (sizeof(char) <= sizeof(PY_LONG_LONG)) {
                __PYX_VERIFY_RETURN_INT_EXC(char, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
            }
        }
        {
            /* Slow path for very wide values: copy raw bytes via _PyLong_AsByteArray. */
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
            PyErr_SetString(PyExc_RuntimeError,
                            "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
            char val;
            PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
            if (likely(v) && !PyLong_Check(v)) {
                PyObject *tmp = v;
                v = PyNumber_Long(tmp);
                Py_DECREF(tmp);
            }
#endif
            if (likely(v)) {
                /* Detect host endianness at runtime. */
                int one = 1; int is_little = (int)*(unsigned char *)&one;
                unsigned char *bytes = (unsigned char *)&val;
                int ret = _PyLong_AsByteArray((PyLongObject *)v,
                                              bytes, sizeof(val),
                                              is_little, !is_unsigned);
                Py_DECREF(v);
                if (likely(!ret))
                    return val;
            }
#endif
            return (char) -1;
        }
    } else {
        /* Not an int/long: coerce via __int__/__index__ and recurse once. */
        char val;
        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
        if (!tmp) return (char) -1;
        val = __Pyx_PyInt_As_char(tmp);
        Py_DECREF(tmp);
        return val;
    }
raise_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "value too large to convert to char");
    return (char) -1;
raise_neg_overflow:
    PyErr_SetString(PyExc_OverflowError,
                    "can't convert negative value to char");
    return (char) -1;
}
/* CheckBinaryVersion */
/* Warn (do not fail) when the Python "major.minor" version the module was
 * compiled against differs from the interpreter running it.
 * Both buffers hold at most 3 chars + NUL, so only "X.Y" is compared
 * (positions 0 and 2); NOTE(review): for two-digit minor versions such as
 * 3.10+ this truncates and only the first minor digit is compared.
 * Returns 0 on match, otherwise the result of PyErr_WarnEx (-1 if the
 * warning was turned into an exception). */
static int __Pyx_check_binary_version(void) {
    char ctversion[4], rtversion[4];
    PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
    PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
    if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
        char message[200];
        PyOS_snprintf(message, sizeof(message),
                      "compiletime version %s of module '%.100s' "
                      "does not match runtime version %s",
                      ctversion, __Pyx_MODULE_NAME, rtversion);
        /* Emit a warning (category NULL -> RuntimeWarning) instead of failing import. */
        return PyErr_WarnEx(NULL, message, 1);
    }
    return 0;
}
/* InitStrings */
/* Materialize the module's interned string constants from a table.
 * Each entry describes one constant: `s` is the raw bytes, `n` the byte
 * length including the trailing NUL (hence the `t->n - 1` slices), and the
 * flag fields select unicode vs bytes, interning, and encoding.  The hash
 * of every created object is computed eagerly so later dict lookups cannot
 * fail on an unhashable constant.  Returns 0 on success, -1 on error. */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
    while (t->p) {
#if PY_MAJOR_VERSION < 3
        if (t->is_unicode) {
            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
        } else if (t->intern) {
            *t->p = PyString_InternFromString(t->s);
        } else {
            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
        }
#else
        if (t->is_unicode | t->is_str) {
            if (t->intern) {
                *t->p = PyUnicode_InternFromString(t->s);
            } else if (t->encoding) {
                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
            } else {
                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
            }
        } else {
            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
        }
#endif
        if (!*t->p)
            return -1;
        /* Pre-compute the hash; -1 signals an error from tp_hash. */
        if (PyObject_Hash(*t->p) == -1)
            return -1;
        ++t;
    }
    return 0;
}
/* Build a Python unicode object from a NUL-terminated C string. */
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
    return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
}
/* Return the C-string contents of `o`, discarding the length.
 * Thin wrapper around __Pyx_PyObject_AsStringAndSize; NULL on error. */
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
    Py_ssize_t ignore;
    return __Pyx_PyObject_AsStringAndSize(o, &ignore);
}
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
/* Return the default-encoded contents of a unicode object plus its byte
 * length, or NULL with an exception set.  Two implementations are selected
 * at preprocessor time: a pre-PEP-393 variant that goes through the cached
 * default-encoded bytes object, and a PEP-393 variant using the UTF-8
 * cache.  In ASCII mode, non-ASCII input is rejected. */
#if !CYTHON_PEP393_ENABLED
static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
    char* defenc_c;
    PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
    if (!defenc) return NULL;
    defenc_c = PyBytes_AS_STRING(defenc);
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
    {
        /* Scan for any byte >= 128; if found, the call below is made only
         * to set a proper UnicodeEncodeError before returning NULL. */
        char* end = defenc_c + PyBytes_GET_SIZE(defenc);
        char* c;
        for (c = defenc_c; c < end; c++) {
            if ((unsigned char) (*c) >= 128) {
                PyUnicode_AsASCIIString(o);
                return NULL;
            }
        }
    }
#endif
    *length = PyBytes_GET_SIZE(defenc);
    return defenc_c;
}
#else
static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
    if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
    if (likely(PyUnicode_IS_ASCII(o))) {
        *length = PyUnicode_GET_LENGTH(o);
        return PyUnicode_AsUTF8(o);
    } else {
        /* Called only to set a UnicodeEncodeError for the non-ASCII input. */
        PyUnicode_AsASCIIString(o);
        return NULL;
    }
#else
    return PyUnicode_AsUTF8AndSize(o, length);
#endif
}
#endif
#endif
/* Extract a C string plus length from a unicode, bytearray, or
 * bytes-compatible object.  Returns NULL with an exception set on failure.
 * The returned pointer borrows the object's internal buffer -- it is only
 * valid while `o` stays alive and unmodified. */
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
    if (
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
            __Pyx_sys_getdefaultencoding_not_ascii &&
#endif
            PyUnicode_Check(o)) {
        return __Pyx_PyUnicode_AsStringAndSize(o, length);
    } else
#endif
#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
    if (PyByteArray_Check(o)) {
        *length = PyByteArray_GET_SIZE(o);
        return PyByteArray_AS_STRING(o);
    } else
#endif
    {
        /* Fallback: anything supporting the bytes protocol. */
        char* result;
        int r = PyBytes_AsStringAndSize(o, &result, length);
        if (unlikely(r < 0)) {
            return NULL;
        } else {
            return result;
        }
    }
}
/* Truth-test `x`, fast-pathing the three singletons True/False/None so the
 * generic (and potentially raising) PyObject_IsTrue is only hit for other
 * objects.  Returns 1, 0, or -1 on error. */
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
    if (x == Py_True) return 1;
    if ((x == Py_False) || (x == Py_None)) return 0;
    return PyObject_IsTrue(x);
}
/* Truth-test `x` and drop the reference the caller owned.
 * Accepts NULL (propagates an error as -1 without touching refcounts). */
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
    int retval;
    if (unlikely(!x)) return -1;
    retval = __Pyx_PyObject_IsTrue(x);
    Py_DECREF(x);
    return retval;
}
/* Handle the case where __int__/__long__ returned the wrong type.
 * On Python 3, a strict int subclass only triggers a DeprecationWarning and
 * the result is still returned (unless the warning is raised as an error);
 * any other type raises TypeError and consumes the `result` reference.
 * `type_name` is "int" or "long", used to format the error message. */
static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
#if PY_MAJOR_VERSION >= 3
    if (PyLong_Check(result)) {
        if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
                "__int__ returned non-int (type %.200s).  "
                "The ability to return an instance of a strict subclass of int "
                "is deprecated, and may be removed in a future version of Python.",
                Py_TYPE(result)->tp_name)) {
            Py_DECREF(result);
            return NULL;
        }
        return result;
    }
#endif
    PyErr_Format(PyExc_TypeError,
                 "__%.4s__ returned non-%.4s (type %.200s)",
                 type_name, type_name, Py_TYPE(result)->tp_name);
    Py_DECREF(result);
    return NULL;
}
/* Coerce an arbitrary object to a Python int (or long on Python 2).
 * Exact ints are returned with a new reference; other objects are converted
 * through the nb_int/nb_long type slots (or PyNumber_Int without type
 * slots).  A result of the wrong type is routed through
 * __Pyx_PyNumber_IntOrLongWrongResultType.  Returns NULL with TypeError if
 * no conversion is possible. */
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
#if CYTHON_USE_TYPE_SLOTS
    PyNumberMethods *m;
#endif
    const char *name = NULL;
    PyObject *res = NULL;
#if PY_MAJOR_VERSION < 3
    if (likely(PyInt_Check(x) || PyLong_Check(x)))
#else
    if (likely(PyLong_Check(x)))
#endif
        /* Already an int: just add a reference. */
        return __Pyx_NewRef(x);
#if CYTHON_USE_TYPE_SLOTS
    m = Py_TYPE(x)->tp_as_number;
    #if PY_MAJOR_VERSION < 3
    if (m && m->nb_int) {
        name = "int";
        res = m->nb_int(x);
    }
    else if (m && m->nb_long) {
        name = "long";
        res = m->nb_long(x);
    }
    #else
    if (likely(m && m->nb_int)) {
        name = "int";
        res = m->nb_int(x);
    }
    #endif
#else
    /* Without type slots, let the C API do the lookup; strings are excluded
     * so they raise TypeError instead of being parsed as numbers. */
    if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
        res = PyNumber_Int(x);
    }
#endif
    if (likely(res)) {
#if PY_MAJOR_VERSION < 3
        if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
#else
        if (unlikely(!PyLong_CheckExact(res))) {
#endif
            return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
        }
    }
    else if (!PyErr_Occurred()) {
        PyErr_SetString(PyExc_TypeError,
                        "an integer is required");
    }
    return res;
}
/* Convert an index-like object to Py_ssize_t.
 * Exact ints take fast paths (including direct digit access for up to 4
 * PyLong digits); anything else goes through PyNumber_Index.  Returns -1
 * with an exception set on error -- callers must check PyErr_Occurred()
 * since -1 is also a valid index. */
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
  Py_ssize_t ival;
  PyObject *x;
#if PY_MAJOR_VERSION < 3
  if (likely(PyInt_CheckExact(b))) {
    if (sizeof(Py_ssize_t) >= sizeof(long))
        return PyInt_AS_LONG(b);
    else
        return PyInt_AsSsize_t(b);
  }
#endif
  if (likely(PyLong_CheckExact(b))) {
#if CYTHON_USE_PYLONG_INTERNALS
    const digit* digits = ((PyLongObject*)b)->ob_digit;
    const Py_ssize_t size = Py_SIZE(b);
    /* Common case: the value fits in a single 15/30-bit digit. */
    if (likely(__Pyx_sst_abs(size) <= 1)) {
      ival = likely(size) ? digits[0] : 0;
      if (size == -1) ival = -ival;
      return ival;
    } else {
      /* Assemble 2-4 digits when they fit into Py_ssize_t. */
      switch (size) {
         case 2:
           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -2:
           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case 3:
           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -3:
           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case 4:
           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
             return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
         case -4:
           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
             return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
           }
           break;
      }
    }
#endif
    return PyLong_AsSsize_t(b);
  }
  /* Generic path: go through the object's __index__. */
  x = PyNumber_Index(b);
  if (!x) return -1;
  ival = PyInt_AsSsize_t(x);
  Py_DECREF(x);
  return ival;
}
/* Map a C long to Py_True/Py_False with a new reference. */
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
  return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
}
/* Build a Python int from a C size_t.
 * NOTE(review): uses the Python 2 PyInt API -- presumably only compiled
 * under a PY_MAJOR_VERSION < 3 guard elsewhere in the file; confirm. */
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
    return PyInt_FromSize_t(ival);
}
#endif /* Py_PYTHON_H */
|
anomalydetector/msanomalydetector/_anomaly_kernel_cython.c/0
|
{
"file_path": "anomalydetector/msanomalydetector/_anomaly_kernel_cython.c",
"repo_id": "anomalydetector",
"token_count": 517182
}
| 302 |
import unittest
import pandas as pd
import numpy as np
from msanomalydetector import SpectralResidual, DetectMode
class FunctionalyTest(unittest.TestCase):
    """End-to-end checks of SpectralResidual.detect() output columns.

    NOTE(review): the class name is missing an "l" ("FunctionallyTest");
    kept as-is because external runners may select it by name.
    """

    @staticmethod
    def _make_frame() -> pd.DataFrame:
        """Build the 100-point daily linear series shared by all tests."""
        return pd.DataFrame({'timestamp': pd.date_range('2020-01-01', periods=100, freq='1D'),
                             'value': np.linspace(1, 100, 100)})

    def _assert_base_columns(self, result: pd.DataFrame, frame: pd.DataFrame) -> None:
        """Assert one output row per input row plus the always-present columns."""
        self.assertEqual(result.shape[0], frame.shape[0])
        self.assertTrue('value' in result.columns)
        self.assertTrue('isAnomaly' in result.columns)
        self.assertTrue('score' in result.columns)

    def test_anomaly_only_mode(self):
        """anomaly_only mode must not emit margin/boundary columns."""
        frame = self._make_frame()
        model = SpectralResidual(frame, threshold=0.3, mag_window=3, score_window=21, sensitivity=99,
                                 detect_mode=DetectMode.anomaly_only, batch_size=0)
        result = model.detect()
        self._assert_base_columns(result, frame)
        self.assertTrue('expectedValue' not in result.columns)
        self.assertTrue('upperBoundary' not in result.columns)
        self.assertTrue('lowerBoundary' not in result.columns)

    def test_anomaly_and_margin_mode(self):
        """anomaly_and_margin mode must add expected-value and boundary columns."""
        frame = self._make_frame()
        model = SpectralResidual(frame, threshold=0.3, mag_window=3, score_window=21, sensitivity=99,
                                 detect_mode=DetectMode.anomaly_and_margin, batch_size=0)
        result = model.detect()
        self._assert_base_columns(result, frame)
        self.assertTrue('expectedValue' in result.columns)
        self.assertTrue('upperBoundary' in result.columns)
        self.assertTrue('lowerBoundary' in result.columns)

    def test_batch_mode(self):
        """Batched detection (batch_size=33, not a divisor of 100) keeps full output."""
        frame = self._make_frame()
        model = SpectralResidual(frame, threshold=0.3, mag_window=3, score_window=21, sensitivity=99,
                                 detect_mode=DetectMode.anomaly_and_margin, batch_size=33)
        result = model.detect()
        self._assert_base_columns(result, frame)
        self.assertTrue('expectedValue' in result.columns)
        self.assertTrue('upperBoundary' in result.columns)
        self.assertTrue('lowerBoundary' in result.columns)
if __name__ == '__main__':
unittest.main()
|
anomalydetector/tests/test_spectral_residual.py/0
|
{
"file_path": "anomalydetector/tests/test_spectral_residual.py",
"repo_id": "anomalydetector",
"token_count": 1183
}
| 303 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import inspect
import warnings
from functools import wraps
from typing import Any, Callable, Optional
# Hashes of objects that have already emitted their deprecation warning,
# so each deprecated callable warns at most once per process.
_deprecate_warnings_set = set()


def deprecated(
    message: Optional[str] = None, deprecate_version: Optional[str] = None, remove_version: Optional[str] = None
) -> Callable:
    """Decorator factory to mark a function or class as deprecated.

    Args:
        message: Extra message appended to the warning.
        deprecate_version: Version in which the function was deprecated.
            If `None`, the version will not be included in the warning message.
        remove_version: Version in which the function will be removed.
            If `None`, the version will not be included in the warning message.

    Returns:
        Decorator that wraps the target callable (or the ``__init__`` of a
        class) so its first invocation emits a ``FutureWarning``.
    """

    def _deprecated(class_or_func: Callable) -> Callable:
        global _deprecate_warnings_set

        obj = class_or_func
        if inspect.isclass(class_or_func):
            # For classes, warn on instantiation by wrapping __init__.
            obj = obj.__init__
        obj_name = class_or_func.__name__

        # Spaces are positioned with the intention of aligning
        # the message with the warning message
        dpr_version_message = f"in v{deprecate_version} " if deprecate_version else ""
        remove_version_message = f" in v{remove_version}" if remove_version else ""
        dpr_message = (
            f"`{obj_name}` has been deprecated {dpr_version_message}and will be removed{remove_version_message}."
        )
        dpr_message += f" {message}" if message else ""

        @wraps(obj)
        def __deprecated(*args, **kwargs) -> Any:
            # Avoids printing the same warning multiple times
            obj_hash = hash(obj)
            if obj_hash not in _deprecate_warnings_set:
                warnings.warn(dpr_message, category=FutureWarning, stacklevel=2)
                _deprecate_warnings_set.add(obj_hash)
            return obj(*args, **kwargs)

        __deprecated._decorator_name_ = "deprecated"
        if inspect.isclass(class_or_func):
            class_or_func.__init__ = __deprecated
            return class_or_func

        return __deprecated

    return _deprecated
|
archai/archai/common/deprecation_utils.py/0
|
{
"file_path": "archai/archai/common/deprecation_utils.py",
"repo_id": "archai",
"token_count": 843
}
| 304 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from collections import OrderedDict
from enum import Enum
from typing import List, Optional
def _dedup_list(input_list: List[str]) -> List[str]:
return list(OrderedDict.fromkeys(input_list))
class SpecialTokenEnum(Enum):
    """Enumerate special tokens.

    Members map to `TokenConfig` attributes via `TokenConfig.special_token_name`:
    UNK (unknown), BOS (begin-of-sentence), EOS (end-of-sentence), PAD (padding).
    MASK has no `TokenConfig` counterpart in this module.
    """

    UNK = 0
    BOS = 1
    EOS = 2
    PAD = 3
    MASK = 4
class TokenConfig:
    """Configuration holder for special tokens (BOS, EOS, UNK, PAD) and
    basic pre-tokenization options.
    """

    def __init__(
        self,
        bos_token: Optional[str] = "<|endoftext|>",
        eos_token: Optional[str] = "<|endoftext|>",
        unk_token: Optional[str] = "<|endoftext|>",
        pad_token: Optional[str] = None,
        add_prefix_space: Optional[bool] = False,
        add_prefix_new_line: Optional[bool] = False,
        lower_case: Optional[bool] = False,
    ) -> None:
        """Initialize the `TokenConfig` with the given special tokens and flags.

        Args:
            bos_token: Begin-of-sentence token.
            eos_token: End-of-sentence token.
            unk_token: Unknown token.
            pad_token: Padding token.
            add_prefix_space: Whether a prefix space token should be added.
            add_prefix_new_line: Whether a prefix new line token should be added.
            lower_case: Whether lower case should be applied.
        """
        self.bos_token = bos_token
        self.eos_token = eos_token
        self.unk_token = unk_token
        self.pad_token = pad_token
        self.add_prefix_space = add_prefix_space
        self.add_prefix_new_line = add_prefix_new_line
        self.lower_case = lower_case

    def get_special_tokens(self) -> List[str]:
        """Return all configured special tokens, de-duplicated.

        Returns:
            Special tokens (unset/empty ones are skipped).
        """
        candidates = (self.unk_token, self.bos_token, self.eos_token, self.pad_token)
        return _dedup_list([token for token in candidates if token])

    def special_token_name(self, sp: SpecialTokenEnum) -> str:
        """Return the configured token string for a special-token enum.

        Args:
            sp: Special token enumerator.

        Returns:
            Configured token string, or ``None`` for unmapped members (e.g. MASK).
        """
        token_by_enum = {
            SpecialTokenEnum.BOS: self.bos_token,
            SpecialTokenEnum.EOS: self.eos_token,
            SpecialTokenEnum.UNK: self.unk_token,
            SpecialTokenEnum.PAD: self.pad_token,
        }
        return token_by_enum.get(sp, None)
archai/archai/datasets/nlp/tokenizer_utils/token_config.py/0
|
{
"file_path": "archai/archai/datasets/nlp/tokenizer_utils/token_config.py",
"repo_id": "archai",
"token_count": 1091
}
| 305 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import re
from pathlib import Path
from time import time
from typing import Any, Dict, List, Optional, Tuple, Union
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.api.search_space import DiscreteSearchSpace
from archai.discrete_search.utils.multi_objective import (
_find_pareto_frontier_points,
get_pareto_frontier,
)
class SearchResults:
    """Discrete search results.
    This class implements search results, which consists in producing data frames
    and plots with information regarding the search.
    """
    def __init__(self, search_space: DiscreteSearchSpace, objectives: SearchObjectives) -> None:
        """Initialize the search results.
        Args:
            search_space: Search space.
            objectives: Search objectives.
        """
        self.search_space = search_space
        self.objectives = objectives
        self.iteration_num = 0
        self.init_time = time()
        # Per-model wall-clock durations (hours since search start), parallel to
        # the flattened list of models across all iterations.
        self.search_walltimes = []
        # One dict per iteration: {"archid": [...], "models": [...], **objective results}.
        self.results = []
    @property
    def all_evaluated_objs(self) -> Dict[str, np.array]:
        """Return all evaluated objectives."""
        # Flattens the per-iteration objective lists into one float32 array per objective.
        return {
            obj_name: np.array([r for iter_results in self.results for r in iter_results[obj_name]], dtype=np.float32)
            for obj_name in self.objectives.objectives
        }
    def add_iteration_results(
        self,
        models: List[ArchaiModel],
        evaluation_results: Dict[str, np.ndarray],
        extra_model_data: Optional[Dict[str, List]] = None,
    ) -> None:
        """Store results of the current search iteration.
        Args:
            models: Models evaluated in the search iteration.
            evaluation_results: Evaluation results from `SearchObjectives.eval_all_objs()`.
            extra_model_data: Additional model information to be stored in the search state
                file. Must be a list of the same size as `models` and csv-serializable.
        """
        assert all(obj_name in evaluation_results for obj_name in self.objectives.objectives)
        assert all(len(r) == len(models) for r in evaluation_results.values())
        # Deep-copied so later caller-side mutation cannot corrupt stored results.
        extra_model_data = copy.deepcopy(extra_model_data) or dict()
        if extra_model_data:
            assert all(len(v) == len(models) for v in extra_model_data.values())
        evaluation_results = copy.deepcopy(evaluation_results)
        evaluation_results.update(extra_model_data)
        self.results.append(
            {
                "archid": [m.archid for m in models],
                "models": [m for m in models], # To avoid creating a reference to `models` variable
                **evaluation_results,
            }
        )
        # Adds current search duration in hours
        self.search_walltimes += [(time() - self.init_time) / 3600] * len(models)
        self.iteration_num += 1
    def get_pareto_frontier(
        self, start_iteration: Optional[int] = 0, end_iteration: Optional[int] = None
    ) -> Dict[str, Any]:
        """Get the pareto-frontier using the search results from iterations `start_iteration`
        to `end_iteration`. If `end_iteration=None`, uses the last iteration.
        Args:
            start_iteration: Start search iteration.
            end_iteration: End search iteration. If `None`, uses the last iteration.
        Returns:
            Dictionary containing 'models', 'evaluation_results', 'indices' and
            'iteration_nums' for all pareto-frontier members.
        """
        end_iteration = end_iteration or self.iteration_num
        all_models = [model for it in range(start_iteration, end_iteration) for model in self.results[it]["models"]]
        all_results = {
            obj_name: np.concatenate(
                [self.results[it][obj_name] for it in range(start_iteration, end_iteration)], axis=0
            )
            for obj_name in self.objectives.objective_names
        }
        # Maps each flattened model index back to the iteration it came from.
        all_iteration_nums = np.array(
            [it for it in range(start_iteration, end_iteration) for _ in range(len(self.results[it]["models"]))]
        )
        pareto_frontier = get_pareto_frontier(all_models, all_results, self.objectives)
        pareto_frontier.update({"iteration_nums": all_iteration_nums[pareto_frontier["indices"]]})
        return pareto_frontier
    def get_search_state_df(self) -> pd.DataFrame:
        """Get the search state data frame.
        Returns:
            Search state data frame.
        """
        state_df = pd.concat(
            [pd.DataFrame(it_results).assign(iteration_num=it) for it, it_results in enumerate(self.results)], axis=0
        ).reset_index(drop=True)
        state_df["search_walltime_hours"] = self.search_walltimes
        pareto_frontier = self.get_pareto_frontier()
        state_df["is_pareto"] = False
        state_df.loc[pareto_frontier["indices"], "is_pareto"] = True
        # "models" holds live model objects and is not csv-serializable, so drop it.
        return state_df.drop(["models"], axis=1)
    def save_search_state(self, file_path: Union[str, Path]) -> None:
        """Save the search state to a .csv file.
        Args:
            file_path: File path to save the search state.
        """
        state_df = self.get_search_state_df()
        state_df.to_csv(file_path, index=False)
    def save_pareto_frontier_models(self, directory: str, save_weights: Optional[bool] = False) -> None:
        """Save the pareto-frontier models to a directory.
        Args:
            directory: Directory to save the models.
            save_weights: If `True`, saves the model weights. Otherwise, only saves the architecture.
        """
        dir_path = Path(directory)
        dir_path.mkdir(exist_ok=True, parents=True)
        pareto_frontier = self.get_pareto_frontier()
        for model in pareto_frontier["models"]:
            self.search_space.save_arch(model, str(dir_path / f"{model.archid}"))
            if save_weights:
                self.search_space.save_model_weights(model, str(dir_path / f"{model.archid}_weights.pt"))
    def plot_2d_pareto_evolution(
        self, objective_names: Tuple[str, str], figsize: Optional[Tuple[int, int]] = (10, 5)
    ) -> plt.Figure:
        """Plot the evolution of the pareto-frontier in 2D.
        Args:
            objective_names: Names of the objectives to plot.
            figsize: Figure size.
        Returns:
            2D pareto-frontier evolution figure.
        """
        obj_x, obj_y = objective_names
        status_df = self.get_search_state_df().copy()
        fig, ax = plt.subplots(figsize=figsize)
        fig.patch.set_facecolor('white')
        status_range = range(0, self.iteration_num + 1)
        # Transforms dimensions to be decreasing if necessary
        max_x, max_y = status_df[obj_x].max(), status_df[obj_y].max()
        status_df["x"], status_df["y"] = status_df[obj_x], status_df[obj_y]
        if self.objectives.objectives[obj_x].higher_is_better:
            status_df["x"] = max_x - status_df["x"]
        if self.objectives.objectives[obj_y].higher_is_better:
            status_df["y"] = max_y - status_df["y"]
        colors = plt.cm.plasma(np.linspace(0, 1, self.iteration_num + 1))
        sm = plt.cm.ScalarMappable(cmap=plt.cm.plasma, norm=plt.Normalize(vmin=0, vmax=self.iteration_num + 1))
        for s in status_range:
            # Cumulative frontier: every evaluation up to and including iteration `s`.
            generation_df = status_df.query(f"iteration_num <= {s}").copy()
            points = generation_df[["x", "y"]].values
            pareto_df = generation_df.iloc[_find_pareto_frontier_points(points)].copy()
            pareto_df = pareto_df.sort_values("x")
            ax.step(pareto_df[obj_x], pareto_df[obj_y], where="post", color=colors[s])
            ax.plot(pareto_df[obj_x], pareto_df[obj_y], "o", color=colors[s])
        ax.set_xlabel(obj_x)
        ax.set_ylabel(obj_y)
        cbar = fig.colorbar(sm, ax=ax)
        cbar.set_label("Iteration number", rotation=270, labelpad=15)
        ax.set_title("Evolution of Pareto Frontier (2D projection)")
        # Close so interactive backends do not display the figure; it is returned instead.
        plt.close()
        return fig
    def save_2d_pareto_evolution_plot(self, objective_names: Tuple[str, str], file_path: str) -> None:
        """Save the evolution of the pareto-frontier in 2D.
        Args:
            objective_names: Names of the objectives to plot.
            file_path: Path to save the plot.
        """
        fig = self.plot_2d_pareto_evolution(objective_names)
        fig.savefig(file_path)
    def save_all_2d_pareto_evolution_plots(self, directory: Union[str, Path]) -> None:
        """Save all the 2D pareto-frontier evolution plots.
        Args:
            directory: Directory to save the plots.
        """
        path = Path(directory)
        path.mkdir(exist_ok=True, parents=True)
        objective_names = list(self.objectives.objective_names)
        plots = []
        # One plot per unordered pair of objectives.
        for i, obj_x in enumerate(objective_names):
            for obj_y in objective_names[(i + 1) :]:
                # Sanitizes filename
                fname = f"pareto_{obj_x}_vs_{obj_y}.png".strip().replace(" ", "_")
                fname = re.sub(r"(?u)[^-\w.]", "", fname)
                plots.append(self.save_2d_pareto_evolution_plot((obj_x, obj_y), str(path / fname)))
|
archai/archai/discrete_search/api/search_results.py/0
|
{
"file_path": "archai/archai/discrete_search/api/search_results.py",
"repo_id": "archai",
"token_count": 4094
}
| 306 |
# Copyright (c) DeepSpeed Team - Microsoft Corporation.
# Licensed under the MIT License.
# https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/profiling/flops_profiler/profiler.py
import math
from collections import OrderedDict
from typing import Callable, List, Optional, Tuple, Union
import numpy as np
import torch
import torch.nn.functional as F
FLOPS = []
MACS = []
TORCH_FUNCTIONS = {}
def __shape_inner_product(dims: Tuple[int, ...]) -> int:
p = 1
for v in dims:
p *= v
return p
def _linear_hook(input: torch.Tensor, weight: torch.Tensor, bias: Optional[torch.Tensor] = None) -> Tuple[int, int]:
out_features = weight.shape[0]
macs = torch.numel(input) * out_features
return 2 * macs, macs
def _relu_hook(input: torch.Tensor, inplace: Optional[bool] = False) -> Tuple[int, int]:
return torch.numel(input), 0
def _prelu_hook(input: torch.Tensor, weight: torch.Tensor) -> Tuple[int, int]:
return torch.numel(input), 0
def _elu_hook(input: torch.Tensor, alpha: Optional[float] = 1.0, inplace: Optional[bool] = False) -> Tuple[int, int]:
return torch.numel(input), 0
def _leakyrelu_hook(
input: torch.Tensor, negative_slope: Optional[float] = 0.01, inplace: Optional[bool] = False
) -> Tuple[int, int]:
return torch.numel(input), 0
def _relu6_hook(input: torch.Tensor, inplace: Optional[bool] = False) -> Tuple[int, int]:
return torch.numel(input), 0
def _silu_hook(input: torch.Tensor, inplace: Optional[bool] = False) -> Tuple[int, int]:
return torch.numel(input), 0
def _gelu_hook(input: torch.Tensor, approximate: str = "none") -> Tuple[int, int]:
return torch.numel(input), 0
def _pool_hook(
input: torch.Tensor,
kernel_size: Union[int, Tuple[int, int]],
stride: Optional[Union[int, Tuple[int, int]]] = None,
padding: Optional[int] = 0,
dilation: Optional[int] = None,
ceil_mode: Optional[bool] = False,
count_include_pad: Optional[bool] = True,
divisor_override: Optional[int] = None,
return_indices: Optional[bool] = None,
) -> Tuple[int, int]:
return torch.numel(input), 0
def _conv_hook(
    input: torch.Tensor,
    weight: torch.Tensor,
    bias: Optional[torch.Tensor] = None,
    stride: Optional[Union[int, Tuple[int, ...]]] = 1,
    padding: Optional[Union[int, str]] = 0,
    dilation: Optional[Union[int, Tuple[int, ...]]] = 1,
    groups: Optional[int] = 1,
) -> Tuple[int, int]:
    """FLOPs/MACs for forward convolution (`F.conv1d/2d/3d`).

    One MAC per kernel tap, per in-channel (within a group), per output
    position; FLOPs are twice the MACs plus one add per output element
    when a bias is present.
    """
    assert weight.shape[1] * groups == input.shape[1]
    batch_size = input.shape[0]
    in_channels = input.shape[1]
    out_channels = weight.shape[0]
    kernel_dims = list(weight.shape[2:])
    spatial_dims = list(input.shape[2:])
    n_spatial = len(spatial_dims)
    # Normalize scalar hyper-parameters into per-dimension tuples.
    pads = padding if type(padding) is tuple else (padding,) * n_spatial
    strides_ = stride if type(stride) is tuple else (stride,) * n_spatial
    dils = dilation if type(dilation) is tuple else (dilation,) * n_spatial
    out_dims = [
        (dim + 2 * pads[i] - (dils[i] * (kernel_dims[i] - 1) + 1)) // strides_[i] + 1
        for i, dim in enumerate(spatial_dims)
    ]
    filters_per_group = out_channels // groups
    macs_per_position = int(__shape_inner_product(kernel_dims)) * in_channels * filters_per_group
    output_positions = batch_size * int(__shape_inner_product(out_dims))
    total_macs = macs_per_position * output_positions
    total_flops = 2 * total_macs
    bias_flops = out_channels * output_positions if bias is not None else 0
    return int(total_flops + bias_flops), int(total_macs)
def _conv_transpose_hook(
    input: torch.Tensor,
    weight: torch.Tensor,
    bias: Optional[torch.Tensor] = None,
    stride: Optional[Union[int, Tuple[int, ...]]] = 1,
    padding: Optional[Union[int, str]] = 0,
    output_padding: Optional[int] = 0,
    dilation: Optional[Union[int, Tuple[int, ...]]] = 1,
    groups: Optional[int] = 1,
) -> Tuple[int, int]:
    """FLOPs/MACs for transposed convolution (`F.conv_transpose1d/2d/3d`).

    MACs are counted per *input* position (each input element is scattered
    through the kernel), FLOPs are twice the MACs plus the bias adds.
    NOTE(review): `output_dims` below uses the *forward*-convolution size
    formula (mirroring the upstream DeepSpeed profiler) and only feeds the
    bias term — confirm whether the true transposed-conv output size was
    intended.
    """
    batch_size = input.shape[0]
    in_channels = input.shape[1]
    out_channels = weight.shape[0]
    kernel_dims = list(weight.shape[2:])
    input_dims = list(input.shape[2:])
    length = len(input_dims)
    # Normalize scalar hyper-parameters into per-dimension tuples.
    paddings = padding if type(padding) is tuple else (padding,) * length
    strides = stride if type(stride) is tuple else (stride,) * length
    dilations = dilation if type(dilation) is tuple else (dilation,) * length
    output_dims = []
    for idx, input_dim in enumerate(input_dims):
        output_dim = (input_dim + 2 * paddings[idx] - (dilations[idx] * (kernel_dims[idx] - 1) + 1)) // strides[idx] + 1
        output_dims.append(output_dim)
    # (Dead re-assignments of paddings/strides/dilations to 2-tuples removed:
    # they were computed but never read afterwards.)
    filters_per_channel = out_channels // groups
    conv_per_position_macs = int(__shape_inner_product(kernel_dims)) * in_channels * filters_per_channel
    active_elements_count = batch_size * int(__shape_inner_product(input_dims))
    overall_conv_macs = conv_per_position_macs * active_elements_count
    overall_conv_flops = 2 * overall_conv_macs
    bias_flops = 0
    if bias is not None:
        bias_flops = out_channels * batch_size * int(__shape_inner_product(output_dims))
    return int(overall_conv_flops + bias_flops), int(overall_conv_macs)
def _batch_norm_hook(
input: torch.Tensor,
running_mean: Optional[torch.Tensor] = None,
running_var: Optional[torch.Tensor] = None,
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
training: Optional[bool] = False,
momentum: Optional[float] = 0.1,
eps: Optional[float] = 1e-05,
) -> Tuple[int, int]:
has_affine = weight is not None
if training:
return torch.numel(input) * (5 if has_affine else 4), 0
flops = torch.numel(input) * (2 if has_affine else 1)
return flops, 0
def _layer_norm_hook(
input: torch.Tensor,
normalized_shape: List[int],
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
eps: Optional[float] = 1e-5,
) -> Tuple[int, int]:
has_affine = weight is not None
return torch.numel(input) * (5 if has_affine else 4), 0
def _instance_norm_hook(
input: torch.Tensor,
running_mean: Optional[torch.Tensor] = None,
running_var: Optional[torch.Tensor] = None,
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
use_input_stats: Optional[bool] = True,
momentum: Optional[float] = 0.1,
eps: Optional[float] = 1e-5,
) -> Tuple[int, int]:
has_affine = weight is not None
return torch.numel(input) * (5 if has_affine else 4), 0
def _group_norm_hook(
input: torch.Tensor,
num_groups: int,
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
eps: Optional[float] = 1e-5,
) -> Tuple[int, int]:
has_affine = weight is not None
return torch.numel(input) * (5 if has_affine else 4), 0
def _upsample_hook(
input: torch.Tensor,
size: Optional[Union[int, Tuple[int, ...]]] = None,
scale_factor: Optional[Union[float, Tuple[float]]] = None,
mode: Optional[str] = "nearest",
align_corners: Optional[bool] = None,
recompute_scale_factor: Optional[bool] = None,
) -> Tuple[int, int]:
if size is not None:
if isinstance(size, tuple):
return int(__shape_inner_product(size)), 0
else:
return int(size), 0
assert scale_factor is not None, "Either `size` or `scale_factor` should be defined."
flops = torch.numel(input)
if isinstance(scale_factor, tuple) and len(scale_factor) == len(input):
flops * int(__shape_inner_product(scale_factor))
else:
flops * scale_factor ** len(input)
return flops, 0
def _softmax_hook(
input: torch.Tensor, dim: Optional[int] = None, _stacklevel: Optional[int] = 3, dtype: Optional[torch.dtype] = None
) -> Tuple[int, int]:
return torch.numel(input), 0
def _embedding_hook(
input: torch.Tensor,
weight: torch.Tensor,
padding_idx: Optional[int] = None,
max_norm: Optional[float] = None,
norm_type: Optional[float] = 2.0,
scale_grad_by_freq: Optional[bool] = False,
sparse: Optional[bool] = False,
) -> Tuple[int, int]:
return 0, 0
def _matmul_hook(input: torch.Tensor, other: torch.Tensor, *, out: Optional[Tuple[int, ...]] = None) -> Tuple[int, int]:
    """FLOPs/MACs for `torch.matmul`/`mm`/`bmm`: one MAC per input element per output column."""
    mac_count = __shape_inner_product(input.shape) * other.shape[-1]
    return mac_count * 2, mac_count
def _addmm_hook(
    input: torch.Tensor,
    mat1: torch.Tensor,
    mat2: torch.Tensor,
    *,
    beta: Optional[int] = 1,
    alpha: Optional[int] = 1,
    out: Optional[Tuple[int, ...]] = None
) -> Tuple[int, int]:
    """FLOPs/MACs for `torch.addmm`: mat1 @ mat2 plus the elementwise add of `input`."""
    mac_count = __shape_inner_product(mat1.shape) * mat2.shape[-1]
    flop_count = 2 * mac_count + __shape_inner_product(input.shape)
    return flop_count, mac_count
def _einsum_hook(equation: str, *operands) -> Tuple[int, int]:
    """FLOPs/MACs for `torch.einsum`, estimated via `np.einsum_path`.

    The equation's letters are normalized to 'a', 'b', ... and the FLOP count
    reported by the optimal contraction path is returned (MACs are 0).
    """
    equation = equation.replace(" ", "")
    # Fix for `opt_einsum.contract`
    if len(operands) == 1 and isinstance(operands[0], tuple):
        operands = operands[0]
    input_shapes = [o.shape for o in operands]
    # Re-map equation letters, in order of first appearance, onto 'a', 'b', ...
    # (97 is ord('a'); str.translate accepts an {ord -> ord} mapping).
    letter_order = OrderedDict((k, 0) for k in equation if k.isalpha()).keys()
    mapping = {ord(x): 97 + i for i, x in enumerate(letter_order)}
    equation = equation.translate(mapping)
    # Zero-filled dummies: einsum_path only inspects shapes, not values.
    np_arrs = [np.zeros(s) for s in input_shapes]
    optim = np.einsum_path(equation, *np_arrs, optimize="optimal")[1]
    # Parse the human-readable path report for the flop estimate.
    for line in optim.split("\n"):
        if "optimized flop" in line.lower():
            flop = int(float(line.split(":")[-1]))
            return flop, 0
    raise NotImplementedError("Unsupported einsum operation.")
def __elementwise_hook(input: torch.Tensor, other: torch.Tensor) -> Tuple[int, int]:
if not torch.is_tensor(input):
if torch.is_tensor(other):
return __shape_inner_product(other.shape), 0
else:
return 1, 0
elif not torch.is_tensor(other):
return __shape_inner_product(input.shape), 0
else:
dim_input = len(input.shape)
dim_other = len(other.shape)
max_dim = max(dim_input, dim_other)
final_shape = []
for i in range(max_dim):
in_i = input.shape[i] if i < dim_input else 1
ot_i = other.shape[i] if i < dim_other else 1
if in_i > ot_i:
final_shape.append(in_i)
else:
final_shape.append(ot_i)
flops = __shape_inner_product(final_shape)
return flops, 0
def _mul_hook(input: torch.Tensor, other: torch.Tensor, *, out: Optional[Tuple[int, ...]] = None) -> Tuple[int, int]:
    """FLOPs/MACs for `torch.mul`: one FLOP per element of the broadcast result."""
    return __elementwise_hook(input, other)
def _add_hook(
    input: torch.Tensor, other: torch.Tensor, *, alpha: Optional[int] = 1, out: Optional[Tuple[int, ...]] = None
) -> Tuple[int, int]:
    """FLOPs/MACs for `torch.add`: one FLOP per element of the broadcast result."""
    return __elementwise_hook(input, other)
def _wrap_fn(fn: Callable, new_fn: Callable) -> Callable:
    """Wraps a function with another function.
    Args:
        fn: Current function.
        new_fn: New function.
    Returns:
        (Callable): Wrapped function.
    """
    old_fn = fn
    name = fn.__name__
    # Remember the original so disable_*_hooks() can restore it later.
    TORCH_FUNCTIONS[name] = old_fn
    def __wrap_fn(*args, **kwargs):
        # `new_fn` only *counts* flops/macs for this call; the real work is
        # still performed by the original function below.
        flops, macs = new_fn(*args, **kwargs)
        # FLOPS/MACS appear to be stacks of per-scope lists managed by the
        # profiler — records go to the innermost active list, and nothing is
        # recorded when no profiling scope is active.
        if FLOPS:
            FLOPS[-1].append((name, flops))
        if MACS and macs:
            MACS[-1].append((name, macs))
        return old_fn(*args, **kwargs)
    # Keep the original name so the restore lookup by __name__ still works.
    __wrap_fn.__name__ = fn.__name__
    return __wrap_fn
def enable_functional_hooks() -> None:
    """Enables functional API profiler hooks.

    Monkey-patches `torch.nn.functional` entry points with counting wrappers;
    the originals are stashed in TORCH_FUNCTIONS by `_wrap_fn` for later restore.
    """
    F.linear = _wrap_fn(F.linear, _linear_hook)
    F.conv1d = _wrap_fn(F.conv1d, _conv_hook)
    F.conv2d = _wrap_fn(F.conv2d, _conv_hook)
    F.conv3d = _wrap_fn(F.conv3d, _conv_hook)
    F.conv_transpose1d = _wrap_fn(F.conv_transpose1d, _conv_transpose_hook)
    F.conv_transpose2d = _wrap_fn(F.conv_transpose2d, _conv_transpose_hook)
    F.conv_transpose3d = _wrap_fn(F.conv_transpose3d, _conv_transpose_hook)
    F.relu = _wrap_fn(F.relu, _relu_hook)
    F.prelu = _wrap_fn(F.prelu, _prelu_hook)
    F.elu = _wrap_fn(F.elu, _elu_hook)
    F.leaky_relu = _wrap_fn(F.leaky_relu, _leakyrelu_hook)
    F.relu6 = _wrap_fn(F.relu6, _relu6_hook)
    # Guarded: older torch versions do not provide F.silu.
    if hasattr(F, "silu"):
        F.silu = _wrap_fn(F.silu, _silu_hook)
    F.gelu = _wrap_fn(F.gelu, _gelu_hook)
    F.batch_norm = _wrap_fn(F.batch_norm, _batch_norm_hook)
    F.layer_norm = _wrap_fn(F.layer_norm, _layer_norm_hook)
    F.instance_norm = _wrap_fn(F.instance_norm, _instance_norm_hook)
    F.group_norm = _wrap_fn(F.group_norm, _group_norm_hook)
    F.avg_pool1d = _wrap_fn(F.avg_pool1d, _pool_hook)
    F.avg_pool2d = _wrap_fn(F.avg_pool2d, _pool_hook)
    F.avg_pool3d = _wrap_fn(F.avg_pool3d, _pool_hook)
    F.max_pool1d = _wrap_fn(F.max_pool1d, _pool_hook)
    F.max_pool2d = _wrap_fn(F.max_pool2d, _pool_hook)
    F.max_pool3d = _wrap_fn(F.max_pool3d, _pool_hook)
    F.adaptive_avg_pool1d = _wrap_fn(F.adaptive_avg_pool1d, _pool_hook)
    F.adaptive_avg_pool2d = _wrap_fn(F.adaptive_avg_pool2d, _pool_hook)
    F.adaptive_avg_pool3d = _wrap_fn(F.adaptive_avg_pool3d, _pool_hook)
    F.adaptive_max_pool1d = _wrap_fn(F.adaptive_max_pool1d, _pool_hook)
    F.adaptive_max_pool2d = _wrap_fn(F.adaptive_max_pool2d, _pool_hook)
    F.adaptive_max_pool3d = _wrap_fn(F.adaptive_max_pool3d, _pool_hook)
    F.upsample = _wrap_fn(F.upsample, _upsample_hook)
    F.interpolate = _wrap_fn(F.interpolate, _upsample_hook)
    F.softmax = _wrap_fn(F.softmax, _softmax_hook)
    F.embedding = _wrap_fn(F.embedding, _embedding_hook)
def disable_functional_hooks() -> None:
    """Disables functional API profiler hooks.

    Restores the original `torch.nn.functional` entry points previously
    stashed in TORCH_FUNCTIONS by `enable_functional_hooks`.
    """
    F.linear = TORCH_FUNCTIONS[F.linear.__name__]
    F.conv1d = TORCH_FUNCTIONS[F.conv1d.__name__]
    F.conv2d = TORCH_FUNCTIONS[F.conv2d.__name__]
    F.conv3d = TORCH_FUNCTIONS[F.conv3d.__name__]
    F.conv_transpose1d = TORCH_FUNCTIONS[F.conv_transpose1d.__name__]
    F.conv_transpose2d = TORCH_FUNCTIONS[F.conv_transpose2d.__name__]
    F.conv_transpose3d = TORCH_FUNCTIONS[F.conv_transpose3d.__name__]
    F.relu = TORCH_FUNCTIONS[F.relu.__name__]
    F.prelu = TORCH_FUNCTIONS[F.prelu.__name__]
    F.elu = TORCH_FUNCTIONS[F.elu.__name__]
    F.leaky_relu = TORCH_FUNCTIONS[F.leaky_relu.__name__]
    F.relu6 = TORCH_FUNCTIONS[F.relu6.__name__]
    # BUGFIX: `enable_functional_hooks` also wraps F.silu (when available) and
    # F.gelu, but they were never restored here, leaving them wrapped forever.
    if hasattr(F, "silu"):
        F.silu = TORCH_FUNCTIONS[F.silu.__name__]
    F.gelu = TORCH_FUNCTIONS[F.gelu.__name__]
    F.batch_norm = TORCH_FUNCTIONS[F.batch_norm.__name__]
    F.layer_norm = TORCH_FUNCTIONS[F.layer_norm.__name__]
    F.instance_norm = TORCH_FUNCTIONS[F.instance_norm.__name__]
    F.group_norm = TORCH_FUNCTIONS[F.group_norm.__name__]
    F.avg_pool1d = TORCH_FUNCTIONS[F.avg_pool1d.__name__]
    F.avg_pool2d = TORCH_FUNCTIONS[F.avg_pool2d.__name__]
    F.avg_pool3d = TORCH_FUNCTIONS[F.avg_pool3d.__name__]
    F.max_pool1d = TORCH_FUNCTIONS[F.max_pool1d.__name__]
    F.max_pool2d = TORCH_FUNCTIONS[F.max_pool2d.__name__]
    F.max_pool3d = TORCH_FUNCTIONS[F.max_pool3d.__name__]
    F.adaptive_avg_pool1d = TORCH_FUNCTIONS[F.adaptive_avg_pool1d.__name__]
    F.adaptive_avg_pool2d = TORCH_FUNCTIONS[F.adaptive_avg_pool2d.__name__]
    F.adaptive_avg_pool3d = TORCH_FUNCTIONS[F.adaptive_avg_pool3d.__name__]
    F.adaptive_max_pool1d = TORCH_FUNCTIONS[F.adaptive_max_pool1d.__name__]
    F.adaptive_max_pool2d = TORCH_FUNCTIONS[F.adaptive_max_pool2d.__name__]
    F.adaptive_max_pool3d = TORCH_FUNCTIONS[F.adaptive_max_pool3d.__name__]
    F.upsample = TORCH_FUNCTIONS[F.upsample.__name__]
    F.interpolate = TORCH_FUNCTIONS[F.interpolate.__name__]
    F.softmax = TORCH_FUNCTIONS[F.softmax.__name__]
    F.embedding = TORCH_FUNCTIONS[F.embedding.__name__]
def enable_tensor_hooks() -> None:
    """Enables tensor-based operations profiler hooks.

    Monkey-patches `torch` tensor ops with counting wrappers; originals are
    stashed in TORCH_FUNCTIONS by `_wrap_fn` for later restore.
    """
    torch.matmul = _wrap_fn(torch.matmul, _matmul_hook)
    torch.mm = _wrap_fn(torch.mm, _matmul_hook)
    torch.bmm = _wrap_fn(torch.bmm, _matmul_hook)
    torch.addmm = _wrap_fn(torch.addmm, _addmm_hook)
    torch.mul = _wrap_fn(torch.mul, _mul_hook)
    torch.add = _wrap_fn(torch.add, _add_hook)
    torch.einsum = _wrap_fn(torch.einsum, _einsum_hook)
def disable_tensor_hooks() -> None:
    """Disables tensor-based operations profiler hooks.

    Restores the original `torch` tensor ops stashed by `enable_tensor_hooks`.
    """
    torch.matmul = TORCH_FUNCTIONS[torch.matmul.__name__]
    torch.mm = TORCH_FUNCTIONS[torch.mm.__name__]
    torch.bmm = TORCH_FUNCTIONS[torch.bmm.__name__]
    torch.addmm = TORCH_FUNCTIONS[torch.addmm.__name__]
    torch.mul = TORCH_FUNCTIONS[torch.mul.__name__]
    torch.add = TORCH_FUNCTIONS[torch.add.__name__]
    torch.einsum = TORCH_FUNCTIONS[torch.einsum.__name__]
|
archai/archai/discrete_search/evaluators/pt_profiler_utils/pt_profiler_hooks.py/0
|
{
"file_path": "archai/archai/discrete_search/evaluators/pt_profiler_utils/pt_profiler_hooks.py",
"repo_id": "archai",
"token_count": 7260
}
| 307 |
from archai.discrete_search.search_spaces.cv.segmentation_dag.search_space import SegmentationDagSearchSpace
|
archai/archai/discrete_search/search_spaces/cv/__init__.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/cv/__init__.py",
"repo_id": "archai",
"token_count": 35
}
| 308 |
from typing import Any
from torch import nn
from transformers import PretrainedConfig
from archai.discrete_search.search_spaces.config import ArchConfig
from .backbones import BACKBONES, CONFIGS
class LanguageModel(nn.Module):
    """Thin `nn.Module` wrapper that instantiates one of the supported LM
    backbones (selected through `arch_config`) together with its Hugging Face
    configuration object.
    """
    def __init__(self, arch_config: ArchConfig, **hf_config_kwargs):
        super().__init__()
        # Backbone family name, e.g. 'codegen' (default) — keys into BACKBONES/CONFIGS.
        self.backbone = arch_config.pick('backbone', default='codegen')
        # Instantiate the backbone's HF config class with the given kwargs.
        self.hf_config = LanguageModel.get_hf_config_cls(arch_config)(**hf_config_kwargs)
        self.model = BACKBONES[self.backbone](arch_config, self.hf_config)
    def forward(self, *args, **kwargs) -> Any:
        """Delegate directly to the wrapped backbone model."""
        return self.model(*args, **kwargs)
    @staticmethod
    def get_hf_config_cls(arch_config: ArchConfig) -> PretrainedConfig:
        """Return the Hugging Face config *class* for the configured backbone."""
        # record_usage=False presumably keeps this read from being recorded as an
        # architecture-parameter usage — TODO confirm against ArchConfig docs.
        backbone = arch_config.pick('backbone', default='codegen', record_usage=False)
        return CONFIGS[backbone]
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/model.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/model.py",
"repo_id": "archai",
"token_count": 334
}
| 309 |
"""
2023.01.05 Extracted the SSKernel class from
https://github.com/HazyResearch/state-spaces/blob/06dbbdfd0876501a7f12bf3262121badbc7658af/src/models/sequence/ss/kernel.py
We add option to use the shift kernel, and remove the option of SSKernelNPLR
SSM convolution kernels.
SSKernel wraps different kernels with common options and handles the initialization.
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange, repeat
from opt_einsum import contract
from .ss_kernel_diag import SSKernelDiag, EMAKernel
from .ss_kernel_shift import SSKernelShift
from . import hippo, dplr
from .ssm_ops.krylov import power
_conj = lambda x: torch.cat([x, x.conj()], dim=-1)
class SSKernel(nn.Module):
    """Wrapper around SSKernel parameterizations.
    The SSKernel is expected to support the interface
    forward()
    default_state()
    _setup_step()
    step()
    """
    def __init__(
        self,
        H,
        N=64,
        L=None,
        measure="diag-lin",
        rank=1,
        channels=1,
        dt_min=0.001,
        dt_max=0.1,
        deterministic=False,
        lr=None,
        mode="diag",
        n_ssm=None,
        verbose=False,
        measure_args={},
        **kernel_args,
    ):
        """State Space Kernel which computes the convolution kernel $\\bar{K}$
        H: Number of independent SSM copies; controls the size of the model. Also called d_model in the config.
        N: State size (dimensionality of parameters A, B, C). Also called d_state in the config. Generally shouldn't need to be adjusted and doesn't affect speed much.
        L: Maximum length of convolution kernel, if known. Should work in the majority of cases even if not known.
        measure: Options for initialization of (A, B). For NPLR mode, recommendations are "legs", "fout", "hippo" (combination of both). For Diag mode, recommendations are "diag-inv", "diag-lin", "diag-legs", and "diag" (combination of diag-inv and diag-lin)
        rank: Rank of low-rank correction for NPLR mode. Needs to be increased for measure "legt"
        channels: C channels turns the SSM from a 1-dim to C-dim map; can think of it having C separate "heads" per SSM. This was partly a feature to make it easier to implement bidirectionality; it is recommended to set channels=1 and adjust H to control parameters instead
        dt_min, dt_max: min and max values for the step size dt (\\Delta)
        mode: Which kernel algorithm to use. 'diag' is the simpler S4D; 'shift' uses a shift kernel; 'ema' uses an EMA kernel
        n_ssm: Number of independent trainable (A, B) SSMs, e.g. n_ssm=1 means all A/B parameters are tied across the H different instantiations of C. n_ssm=None means all H SSMs are completely independent. Generally, changing this option can save parameters but doesn't affect performance or speed much. This parameter must divide H
        lr: Passing in a number (e.g. 0.001) sets attributes of SSM parameters (A, B, dt). A custom optimizer hook is needed to configure the optimizer to set the learning rates appropriately for these parameters.
        """
        super().__init__()
        self.N = N
        self.H = H
        dtype, cdtype = torch.float, torch.cfloat
        self.channels = channels
        self.n_ssm = n_ssm if n_ssm is not None else H
        self.mode = mode
        self.verbose = verbose
        self.kernel_args = kernel_args
        # Generate dt
        if deterministic:
            # NOTE(review): the exp() here makes `log_dt` hold dt values (not
            # their logs) in this branch, unlike the random branch below —
            # confirm against the upstream state-spaces implementation.
            log_dt = torch.exp(torch.linspace(math.log(dt_min), math.log(dt_max), H))
        else:
            # dt ~ LogUniform(dt_min, dt_max): uniform in log-space.
            log_dt = torch.rand(self.H, dtype=dtype) * (
                math.log(dt_max) - math.log(dt_min)
            ) + math.log(dt_min)
        # Compute the preprocessed representation
        if mode == "ema":
            self.kernel = EMAKernel(H, N=N, channels=channels, **kernel_args)
        else:
            w, P, B, V = dplr.combination(measure, self.N, rank, self.n_ssm, **measure_args)
            # Broadcast C to have H channels
            if deterministic:
                C = torch.zeros(channels, self.n_ssm, self.N, dtype=cdtype)
                C[:, :, :1] = 1.
                C = contract('hmn, chn -> chm', V.conj().transpose(-1, -2), C) # V^* C
                C = repeat(C, 'c t n -> c (v t) n', v=self.n_ssm // C.size(-2)).clone().contiguous()
            else:
                C = torch.randn(channels, self.H, self.N//2, dtype=cdtype)
            # Broadcast other parameters to have n_ssm copies
            assert self.n_ssm % B.size(-2) == 0 \
                and self.n_ssm % P.size(-2) == 0 \
                and self.n_ssm % w.size(-2) == 0
            # Broadcast tensors to n_ssm copies
            # These will be the parameters, so make sure tensors are materialized and contiguous
            B = repeat(B, 't n -> (v t) n', v=self.n_ssm // B.size(-2)).clone().contiguous()
            P = repeat(P, 'r t n -> r (v t) n', v=self.n_ssm // P.size(-2)).clone().contiguous()
            w = repeat(w, 't n -> (v t) n', v=self.n_ssm // w.size(-2)).clone().contiguous()
            if mode == "diag":
                if not measure.startswith("diag"):
                    print("Diagonal kernel (S4D) activated but initialization is not intended for S4D. Set `measure` to 'diag-lin', 'diag-inv', or 'diag-legs' for the main variants, or 'diag' for a combination of S4D-Lin and S4D-Inv.")
                C = C * repeat(B, 't n -> (v t) n', v=H//self.n_ssm)
                self.kernel = SSKernelDiag(
                    w, B, C, log_dt, L=L,
                    lr=lr,
                    **kernel_args,
                )
            elif mode == 'shift':
                # Initializing B to be e_1
                B = torch.zeros(self.H, self.N)
                B[..., 0] = 1.0
                # Match torch.Conv1d init
                C = torch.randn(self.H, self.channels, self.N)
                nn.init.kaiming_uniform_(C, a=math.sqrt(5))
                C = rearrange(C, 'h c n -> c h n')
                self.kernel = SSKernelShift(B, C, L=L, lr=lr, **kernel_args)
            else:
                raise NotImplementedError(f"mode={mode} is not valid")
    def forward(self, state=None, L=None, rate=None):
        # Delegates kernel evaluation to the underlying parameterization.
        return self.kernel(state=state, L=L, rate=rate)
    @torch.no_grad()
    def forward_state(self, u, state):
        """ Forward the state through a sequence, i.e. computes the state after passing chunk through SSM
        state: (B, H, N)
        u: (B, H, L)
        Returns: (B, H, N)
        """
        # Prefer the kernel's own implementation when it provides one.
        if hasattr(self.kernel, "forward_state"):
            return self.kernel.forward_state(u, state)
        dA, dB = self.kernel._setup_state() # Construct dA, dB matrices
        # dA, dB = self.kernel.dA, self.kernel.dB # (H N N) (H N)
        # If the stored state only keeps half the conjugate pairs, expand it.
        conj = state.size(-1) != dA.size(-1)
        if conj: state = _conj(state)
        v = contract('h n, b h l -> b h n l', dB, u.flip(-1)) # dB.unsqueeze(-1) * u.flip(-1).unsqueeze(-2)
        AL, v = power(u.size(-1), dA, v)
        next_state = contract("h m n, b h n -> b h m", AL, state)
        next_state = next_state + v
        if conj: next_state = next_state[..., : next_state.size(-1) // 2]
        return next_state
    def _setup_step(self, **kwargs):
        # This method is intended to be private so that setting up an S4 module with
        # ```
        # if hasattr(module, 'setup_step'): module.setup_step()
        # ```
        # will not trigger this method multiple times
        self.kernel._setup_step(**kwargs)
    def step(self, u, state, **kwargs):
        """Single recurrent step; delegates to the underlying kernel."""
        y, state = self.kernel.step(u, state, **kwargs)
        return y, state
    def default_state(self, *args, **kwargs):
        """Return the kernel's initial recurrent state."""
        return self.kernel.default_state(*args, **kwargs)
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/ssm_utils/ss_kernel.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/ssm_utils/ss_kernel.py",
"repo_id": "archai",
"token_count": 3490
}
| 310 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
class DepthWiseConvolution(nn.Module):
    """Causal depth-wise 1D convolution (https://arxiv.org/abs/2109.08668).

    Operates on concatenated Q/K/V projections, hence every channel count
    is ``3 * d_model``. Inputs and outputs are laid out as ``(L, B, F)``.
    """

    def __init__(self, d_model: int, kernel_size: Optional[int] = 3) -> None:
        super().__init__()

        self.kernel_size = kernel_size

        n_channels = d_model * 3
        # groups == channels makes the convolution depth-wise (one filter per channel)
        self.dconv = nn.Conv1d(n_channels, n_channels, kernel_size=kernel_size, groups=n_channels)

    def forward(self, inputs: torch.FloatTensor) -> torch.FloatTensor:
        # Rearrange LxBxF -> BxFxL for Conv1d
        heads = inputs.permute((1, 2, 0))

        # Left-pad by kernel_size - 1 so the convolution is causal
        # (no position can look at future inputs)
        heads = F.pad(heads, (self.kernel_size - 1, 0))
        heads = self.dconv(heads)

        # Rearrange back: BxFxL -> LxBxF
        return heads.permute((2, 0, 1))
|
archai/archai/discrete_search/search_spaces/nlp/transformer_flex/models/mem_transformer_utils/depth_wise_convolution.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/transformer_flex/models/mem_transformer_utils/depth_wise_convolution.py",
"repo_id": "archai",
"token_count": 423
}
| 311 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import types
import torch
from onnx import helper, load_model, numpy_helper, save
from onnxruntime.transformers import quantize_helper
from archai.onnx.onnx_forward import gpt2_onnx_forward
def prepare_model_for_onnx(model: torch.nn.Module, model_type: str) -> torch.nn.Module:
    """Prepare a PyTorch model for ONNX export.

    For GPT-2-based architectures, the forward function is replaced with an
    ONNX-friendly one and each block's `Conv1D` MLP modules are converted to
    `Linear` layers. The model is always switched to evaluation mode.

    Args:
        model: Instance of the model to prepare for ONNX export.
        model_type: Type of model.

    Returns:
        The prepared PyTorch model, ready for ONNX export.
    """

    is_gpt2_family = model_type in ["gpt2", "gpt2-flex"]
    if is_gpt2_family:
        # Bind the ONNX-compatible forward onto this model instance
        model.forward = types.MethodType(gpt2_onnx_forward, model)

        for layer in model.transformer.h:
            quantize_helper.conv1d_to_linear(layer.mlp)

    # Evaluation mode disables dropout for a deterministic export
    model.eval()

    return model
def weight_sharing(onnx_model_path: str, model_type: str) -> None:
    """Share weights between embedding and softmax layers in an ONNX model.

    The embedding initializer is removed from the graph and the embedding
    Gather node is rewired to read from the softmax weight instead, so both
    layers share a single tensor in the exported model. The model is edited
    and saved back in place.

    Args:
        onnx_model_path: Path to the ONNX model that will have weights shared.
        model_type: Type of model to share the weights.
    """
    # Finds nodes in the graph based on their input name
    def _find_nodes_by_input(nodes, input_name):
        return [name for name in nodes.keys() if input_name in nodes[name].input]
    # Finds weights in the graph based on their shape
    def _find_weights_by_shape(weights, shape):
        return [name for name in weights.keys() if numpy_helper.to_array(weights[name]).shape == shape]
    # Loads the ONNX model
    model = load_model(onnx_model_path)
    # Gathers weights and nodes from the loaded model
    weights = {w.name: w for w in model.graph.initializer}
    nodes = {n.name: n for n in model.graph.node}
    if model_type in ["gpt2", "gpt2-flex"]:
        # GPT-2 has a single (non-adaptive) embedding matrix and no cutoffs
        n_emb_weight = 1
        n_cutoffs = 0
    else:
        raise ValueError(f"model_type: {model_type} not supported for weight sharing.")
    for i in range(n_emb_weight):
        # Grabs the embedding weights pointer and removes from the graph
        emb_weight_name = f"word_emb.emb_layers.{i}.weight"
        if model_type in ["gpt2", "gpt2-flex"]:
            emb_weight_name = "transformer.wte.weight"
        emb_weight = numpy_helper.to_array(weights[emb_weight_name])
        model.graph.initializer.remove(weights[emb_weight_name])
        # Replaces the duplicated embedding weights by the softmax ones.
        # The softmax weight is looked up by shape: the transpose of the
        # embedding (vocab x dim -> dim x vocab), extended by the cutoffs.
        softmax_shape = (emb_weight.shape[1], emb_weight.shape[0])
        if i == 0:
            softmax_shape = (emb_weight.shape[1], emb_weight.shape[0] + n_cutoffs)
        softmax_weight = _find_weights_by_shape(weights, softmax_shape)[0]
        emb_gather_name = _find_nodes_by_input(nodes, emb_weight_name)[0]
        # The shared tensor is transposed, so Gather must index along axis 1
        nodes[emb_gather_name].attribute.append(helper.make_attribute("axis", 1))
        nodes[emb_gather_name].input[0] = softmax_weight
        # Adds a "Transpose" node to invert the new embedding weights
        permute_dim = [1, 2, 0]
        if n_cutoffs != 0:
            permute_dim = [1, 0, 2]
        emb_gather_output = nodes[emb_gather_name].output[0]
        transpose_node_output = f"transposed_out_{i}"
        transpose_node = helper.make_node("Transpose", [emb_gather_output], [transpose_node_output], perm=permute_dim)
        model.graph.node.append(transpose_node)
        # Links the previous embedding output with the "Transpose" node
        emb_gather = _find_nodes_by_input(nodes, emb_gather_output)[0]
        nodes[emb_gather].input[0] = transpose_node_output
    # Saves the ONNX model
    save(model, onnx_model_path)
|
archai/archai/onnx/export_utils.py/0
|
{
"file_path": "archai/archai/onnx/export_utils.py",
"repo_id": "archai",
"token_count": 1525
}
| 312 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Optional
import torch
from torch._C import dtype
from torch.quantization import MinMaxObserver
from archai.quantization.observers import OnnxDynamicObserver
class FakeDynamicQuant(torch.nn.Module):
    """Fake dynamic quantizer to allow for scale/zero point calculation during Quantization-Aware Training.

    This class allows inserting a fake dynamic quantization operator in a PyTorch model,
    in order to calculate scale and zero point values that can be used to quantize the
    model during training. The operator can be customized to use different quantization types
    (quint8 or qint8) and bit widths, and it can be made compatible with ONNX.

    Note: This module is only meant to be used during training, and should not be present
    in the final, deployed model.
    """
    def __init__(
        self,
        reduce_range: Optional[bool] = True,
        dtype: Optional[dtype] = torch.quint8,
        bits: Optional[int] = 8,
        onnx_compatible: Optional[bool] = False,
    ) -> None:
        """Initialize a customizable fake dynamic quantization operator.

        Args:
            reduce_range: Whether to reduce the range of quantization. This option is
                only supported for 8-bit quantization.
            dtype: Type of quantization operators. Supported values are `torch.quint8` and
                `torch.qint8`.
            bits: Number of bits used in the quantization. Supported values are 8 and 16.
            onnx_compatible: Whether the quantization should be compatible with ONNX.
        """
        super().__init__()
        self.bits = bits
        # `reduce_range` is only meaningful for 8-bit quantization
        self.reduce_range = reduce_range if bits == 8 else False
        self.dtype = dtype
        self.onnx_compatible = onnx_compatible
        assert dtype in (torch.quint8, torch.qint8)
        # Pre-compute the integer range [qmin, qmax] of the target dtype
        if dtype == torch.quint8:
            if self.reduce_range:
                # NOTE(review): PyTorch's reduced quint8 range ends at
                # 2 ** (bits - 1) - 1 (= 127 for 8 bits); here qmax is 128 —
                # confirm this off-by-one is intentional
                self.qmin, self.qmax = 0, 2 ** (bits - 1)
            else:
                self.qmin, self.qmax = 0, 2**bits - 1
        else:
            if self.reduce_range:
                self.qmin, self.qmax = -(2 ** (bits - 2)), 2 ** (bits - 2) - 1
            else:
                self.qmin, self.qmax = -(2 ** (bits - 1)), 2 ** (bits - 1) - 1
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Only float32 inputs are fake-quantized; everything else passes through
        if x.dtype == torch.float32:
            if self.bits == 8:
                # 8-bit path: let an observer derive scale/zero point from x
                if self.dtype == torch.quint8:
                    qscheme = torch.per_tensor_affine
                else:
                    qscheme = torch.per_tensor_symmetric
                if self.onnx_compatible:
                    observer = OnnxDynamicObserver(dtype=self.dtype)
                else:
                    observer = MinMaxObserver(
                        dtype=self.dtype,
                        qscheme=qscheme,
                        reduce_range=self.reduce_range,
                    )
                observer(x)
                scale, zero_pointer = observer.calculate_qparams()
            else:
                # Non-8-bit path: compute scale/zero point manually from the
                # observed min/max of x
                min_val, max_val = x.min(), x.max()
                initial_scale = (max_val - min_val) / float(self.qmax - self.qmin)
                # Candidate zero points anchored at either end of the range
                min_zero_pointer = self.qmin - min_val / initial_scale
                max_zero_pointer = self.qmax - max_val / initial_scale
                # Pick the candidate with the smaller rounding error
                min_zero_pointer_error = abs(self.qmin) - abs(min_val / initial_scale)
                max_zero_pointer_error = abs(self.qmax) - abs(max_val / initial_scale)
                if min_zero_pointer_error < max_zero_pointer_error:
                    initial_zero_pointer = min_zero_pointer
                else:
                    initial_zero_pointer = max_zero_pointer
                initial_zero_pointer = initial_zero_pointer.round()
                scale, zero_pointer = initial_scale, initial_zero_pointer
            # Prevents `zero_pointer` from being outside the range of the quantized dtype
            if zero_pointer > self.qmax:
                zero_pointer = torch.tensor(self.qmax)
            elif zero_pointer < self.qmin:
                zero_pointer = torch.tensor(self.qmin)
            # Quantize-dequantize round trip using the derived parameters
            x = torch.fake_quantize_per_tensor_affine(
                x, float(scale.item()), int(zero_pointer.item()), self.qmin, self.qmax
            )
            # Cache last-used parameters for inspection/export
            self._scale, self._zero_pointer = scale, zero_pointer
        return x
|
archai/archai/quantization/quantizers.py/0
|
{
"file_path": "archai/archai/quantization/quantizers.py",
"repo_id": "archai",
"token_count": 2036
}
| 313 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from overrides import overrides
from archai.common.common import get_conf
from archai.supergraph.algos.darts.bilevel_arch_trainer import BilevelArchTrainer
from archai.supergraph.algos.divnas.divnas_finalizers import DivnasFinalizers
from archai.supergraph.algos.divnas.divnas_model_desc_builder import (
DivnasModelDescBuilder,
)
from archai.supergraph.algos.divnas.divnas_rank_finalizer import DivnasRankFinalizers
from archai.supergraph.nas.arch_trainer import ArchTrainer, TArchTrainer
from archai.supergraph.nas.exp_runner import ExperimentRunner
from archai.supergraph.nas.finalizers import Finalizers
class DivnasExperimentRunner(ExperimentRunner):
    """Experiment runner wiring up the DivNAS search components."""

    @overrides
    def model_desc_builder(self)->DivnasModelDescBuilder:
        return DivnasModelDescBuilder()

    @overrides
    def trainer_class(self)->TArchTrainer:
        # The trainer flavor is selected through the experiment config
        conf = get_conf()
        trainer_name = conf['nas']['search']['divnas']['archtrainer']

        if trainer_name == 'bilevel':
            return BilevelArchTrainer
        if trainer_name == 'noalpha':
            return ArchTrainer
        raise NotImplementedError

    @overrides
    def finalizers(self)->Finalizers:
        # Mutual-information-based finalizers, with a fallback to the default
        conf = get_conf()
        finalizer_name = conf['nas']['search']['finalizer']

        if finalizer_name == 'mi':
            return DivnasFinalizers()
        if finalizer_name == 'mi_ranked':
            return DivnasRankFinalizers()
        return super().finalizers()
|
archai/archai/supergraph/algos/divnas/divnas_exp_runner.py/0
|
{
"file_path": "archai/archai/supergraph/algos/divnas/divnas_exp_runner.py",
"repo_id": "archai",
"token_count": 587
}
| 314 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Optional
from overrides import overrides
from archai.common.config import Config
from archai.supergraph.nas.arch_trainer import TArchTrainer
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model_desc_builder import ModelDescBuilder
from archai.supergraph.nas.searcher import Searcher, SearchResult
class ManualSearcher(Searcher):
    """Searcher used when the architecture is hand-designed (no search)."""

    @overrides
    def search(self, conf_search:Config, model_desc_builder:Optional[ModelDescBuilder],
               trainer_class:TArchTrainer, finalizers:Finalizers)->SearchResult:
        # A manual model already exists, so there is nothing to search for:
        # return an empty result
        return SearchResult(None, None, None)
|
archai/archai/supergraph/algos/manual/manual_searcher.py/0
|
{
"file_path": "archai/archai/supergraph/algos/manual/manual_searcher.py",
"repo_id": "archai",
"token_count": 236
}
| 315 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import torchvision
from overrides import overrides
from torchvision.transforms import transforms
from archai.common import utils
from archai.common.config import Config
from archai.supergraph.datasets.dataset_provider import (
DatasetProvider,
ImgSize,
TrainTestDatasets,
register_dataset_provider,
)
class Mit67Provider(DatasetProvider):
    """Dataset provider for the MIT67 indoor-scenes dataset (ImageFolder layout)."""

    def __init__(self, conf_dataset:Config):
        super().__init__(conf_dataset)
        self._dataroot = utils.full_path(conf_dataset['dataroot'])

    @overrides
    def get_datasets(self, load_train:bool, load_test:bool,
                     transform_train, transform_test)->TrainTestDatasets:
        trainset, testset = None, None

        if load_train:
            trainset = torchvision.datasets.ImageFolder(
                os.path.join(self._dataroot, 'mit67', 'train'),
                transform=transform_train)
        if load_test:
            testset = torchvision.datasets.ImageFolder(
                os.path.join(self._dataroot, 'mit67', 'test'),
                transform=transform_test)

        return trainset, testset

    @overrides
    def get_transforms(self, img_size:ImgSize)->tuple:
        print(f'IMG SIZE: {img_size}')
        if isinstance(img_size, int):
            img_size = (img_size, img_size)

        # MEAN, STD computed for mit67
        MEAN = [0.4893, 0.4270, 0.3625]
        STD = [0.2631, 0.2565, 0.2582]

        # transformations match that in
        # https://github.com/antoyang/NAS-Benchmark/blob/master/DARTS/preproc.py
        train_transf = [
            transforms.RandomResizedCrop(img_size, scale=(0.75, 1)),
            transforms.RandomHorizontalFlip(),
            transforms.ColorJitter(
                brightness=0.4,
                contrast=0.4,
                saturation=0.4,
                hue=0.2)
        ]

        # Test: resize 10% larger than the target, then center-crop to size
        margin_size = (int(img_size[0] + img_size[0]*0.1), int(img_size[1] + img_size[1]*0.1))
        test_transf = [transforms.Resize(margin_size), transforms.CenterCrop(img_size)]

        normalize = [
            transforms.ToTensor(),
            transforms.Normalize(MEAN, STD)
        ]

        return (transforms.Compose(train_transf + normalize),
                transforms.Compose(test_transf + normalize))

register_dataset_provider('mit67', Mit67Provider)
|
archai/archai/supergraph/datasets/providers/mit67_provider.py/0
|
{
"file_path": "archai/archai/supergraph/datasets/providers/mit67_provider.py",
"repo_id": "archai",
"token_count": 1099
}
| 316 |
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class ShakeDropFunction(torch.autograd.Function):
    """Autograd function implementing ShakeDrop regularization.

    During training, with probability `p_drop` (gate == 0) the forward output
    is scaled by a random per-sample alpha and the backward gradient by an
    independent random beta; otherwise the input passes through unchanged.
    At inference the expected scaling `(1 - p_drop)` is applied.

    NOTE: tensors are allocated with `torch.cuda.FloatTensor`, so CUDA inputs
    are required. In eval mode no gate is saved, so `backward` is not expected
    to be called (inference runs without gradients).
    """

    @staticmethod
    def forward(ctx, x, training=True, p_drop=0.5, alpha_range=(-1, 1)):
        # Default changed from a mutable list to a tuple (same values):
        # mutable default arguments are shared across calls.
        if training:
            # gate == 0 means "drop": apply random alpha scaling
            gate = torch.cuda.FloatTensor([0]).bernoulli_(1 - p_drop)
            ctx.save_for_backward(gate)
            if gate.item() == 0:
                # One alpha per sample, broadcast across the remaining dims
                alpha = torch.cuda.FloatTensor(x.size(0)).uniform_(*alpha_range)
                alpha = alpha.view(alpha.size(0), 1, 1, 1).expand_as(x)
                return alpha * x
            else:
                return x
        else:
            # Inference: scale by the expected keep probability
            return (1 - p_drop) * x

    @staticmethod
    def backward(ctx, grad_output):
        gate = ctx.saved_tensors[0]
        if gate.item() == 0:
            # Independent random beta per sample for the backward pass.
            # (Deprecated `torch.autograd.Variable` wrapper removed: it has
            # been a no-op since PyTorch 0.4.)
            beta = torch.cuda.FloatTensor(grad_output.size(0)).uniform_(0, 1)
            beta = beta.view(beta.size(0), 1, 1, 1).expand_as(grad_output)
            return beta * grad_output, None, None, None
        else:
            return grad_output, None, None, None
class ShakeDrop(nn.Module):
    """Module wrapper around `ShakeDropFunction`.

    Args:
        p_drop: Probability of applying the random alpha/beta scaling.
        alpha_range: Range from which the forward scaling alpha is drawn.
    """

    def __init__(self, p_drop=0.5, alpha_range=(-1, 1)):
        # Default changed from the mutable list `[-1, 1]` to an equivalent
        # tuple: mutable default arguments are shared across instances.
        super(ShakeDrop, self).__init__()
        self.p_drop = p_drop
        self.alpha_range = alpha_range

    def forward(self, x):
        # `self.training` toggles between the stochastic (train) and
        # expected-value (eval) paths inside the function
        return ShakeDropFunction.apply(x, self.training, self.p_drop, self.alpha_range)
|
archai/archai/supergraph/models/shakedrop.py/0
|
{
"file_path": "archai/archai/supergraph/models/shakedrop.py",
"repo_id": "archai",
"token_count": 702
}
| 317 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Iterable, Optional, Tuple
import numpy as np
import torch
from overrides import overrides
from torch import Tensor, nn
from archai.common import ml_utils
from archai.supergraph.nas.arch_module import ArchModule
from archai.supergraph.nas.cell import Cell
from archai.supergraph.nas.model_desc import AuxTowerDesc, CellDesc, ModelDesc
from archai.supergraph.nas.operations import DropPath_, Op
class Model(ArchModule):
    """Compiled super-graph model: stems -> sequence of cells (with optional
    auxiliary towers) -> pooling -> logits, built from a `ModelDesc`."""

    def __init__(self, model_desc:ModelDesc, droppath:bool, affine:bool):
        super().__init__()

        # some of these fields are public as finalizer needs access to them
        self.desc = model_desc

        # TODO: support any number of stems
        assert len(model_desc.model_stems)==2, "Model compiler currently only supports 2 stems"
        stem0_op = Op.create(model_desc.model_stems[0], affine=affine)
        stem1_op = Op.create(model_desc.model_stems[1], affine=affine)
        self.model_stems = nn.ModuleList((stem0_op, stem1_op))

        self.cells = nn.ModuleList()
        self._aux_towers = nn.ModuleList()

        # Build one cell (and possibly one aux tower) per cell description
        for i, (cell_desc, aux_tower_desc) in \
                enumerate(zip(model_desc.cell_descs(), model_desc.aux_tower_descs)):
            self._build_cell(cell_desc, aux_tower_desc, droppath, affine)

        # adaptive pooling output size to 1x1
        self.pool_op = Op.create(model_desc.pool_op, affine=affine)
        # since ch_p records last cell's output channels
        # it indicates the input channel number
        self.logits_op = Op.create(model_desc.logits_op, affine=affine)

    def _build_cell(self, cell_desc:CellDesc,
                    aux_tower_desc:Optional[AuxTowerDesc],
                    droppath:bool, affine:bool)->None:
        # A cell may share its trainable weights with an earlier cell;
        # `trainables_from == id` means the cell owns its own weights
        trainables_from = None if cell_desc.trainables_from==cell_desc.id \
                            else self.cells[cell_desc.trainables_from]
        cell = Cell(cell_desc, affine=affine, droppath=droppath,
                    trainables_from=trainables_from)
        self.cells.append(cell)
        # Keep aux tower list index-aligned with cells (None where absent)
        self._aux_towers.append(AuxTower(aux_tower_desc) \
                                if aux_tower_desc else None)

    def summary(self)->dict:
        """Return model statistics: cell count, parameter sizes, arch params, op count."""
        all_arch_params = list(self.all_owned()
                               .param_by_kind(kind=None))
        return {
            'cell_count': len(self.cells),
            #'cell_params': [ml_utils.param_size(c) for c in self.cells]
            'params': ml_utils.param_size(self),
            'arch_params_len': len(all_arch_params),
            # NOTE(review): np.sum over a generator relies on deprecated
            # fallback behavior; builtin sum() would be the safe form
            'arch_params_numel': np.sum(a.numel() for a in all_arch_params),
            'ops': np.sum(len(n.edges) for c in self.desc.cell_descs() for n in c.nodes()),
        }

    def ops(self)->Iterable[Op]:
        """Yield every op of every cell in order."""
        for cell in self.cells:
            for op in cell.ops():
                yield op

    @overrides
    def forward(self, x)->Tuple[Tensor, Optional[Tensor]]:
        """Run stems, cells and final pooling/logits.

        Returns (logits, logits_aux); logits_aux is None unless an aux tower
        fired during training.
        """
        #print(torch.cuda.memory_allocated()/1.0e6)
        s0 = self.model_stems[0](x)
        #print(torch.cuda.memory_allocated()/1.0e6)
        s1 = self.model_stems[1](x)
        #print(-1, s0.shape, s1.shape, torch.cuda.memory_allocated()/1.0e6)

        logits_aux = None
        for ci, (cell, aux_tower) in enumerate(zip(self.cells, self._aux_towers)):
            #print(s0.shape, s1.shape, end='')
            # Each cell consumes the two previous cell outputs
            s0, s1 = s1, cell.forward(s0, s1)
            #print(ci, s0.shape, s1.shape, torch.cuda.memory_allocated()/1.0e6)

            # TODO: this mimics darts but won't work for multiple aux towers
            if aux_tower is not None and self.training:
                logits_aux = aux_tower(s1)
                #print(ci, 'aux', logits_aux.shape)

        # s1 is now the last cell's output
        out = self.pool_op(s1)
        logits = self.logits_op(out) # flatten
        #print(-1, 'out', out.shape)
        #print(-1, 'logits', logits.shape)
        return logits, logits_aux

    def device_type(self)->str:
        """Return device type ('cuda'/'cpu') of the first parameter."""
        return next(self.parameters()).device.type

    def drop_path_prob(self, p:float):
        """Set drop path probability.

        This will be called externally so any `DropPath_` modules get new probability.
        Typically, every epoch we will reduce this probability.
        """
        for module in self.modules():
            if isinstance(module, DropPath_):
                module.p = p
class AuxTower(nn.Module):
    """Auxiliary classification tower attached to an intermediate cell's output.

    The original comment assumed a 14x14 input; in general the pooling/conv
    stack must reduce the feature map to 1x1 so the 768-wide flatten matches
    the final linear layer.
    """

    def __init__(self, aux_tower_desc:AuxTowerDesc):
        # TODO: assert input size?
        super().__init__()

        self.features = nn.Sequential(
            nn.ReLU(inplace=True),
            nn.AvgPool2d(5, stride=aux_tower_desc.stride, padding=0, count_include_pad=False),
            nn.Conv2d(aux_tower_desc.ch_in, 128, 1, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 768, 2, bias=False),
            # TODO: This batchnorm was omitted in orginal implementation due to a typo.
            nn.BatchNorm2d(768),
            nn.ReLU(inplace=True),
        )
        self.logits_op = nn.Linear(768, aux_tower_desc.n_classes)

    def forward(self, x:torch.Tensor):
        feats = self.features(x)
        # Flatten (B, 768, h, w) before the classifier
        return self.logits_op(feats.view(feats.size(0), -1))
|
archai/archai/supergraph/nas/model.py/0
|
{
"file_path": "archai/archai/supergraph/nas/model.py",
"repo_id": "archai",
"token_count": 2508
}
| 318 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Optional, Tuple
import torch
from overrides import EnforceOverrides
from torch import Tensor, nn
from torch.utils.data import DataLoader
from archai.common import ml_utils
from archai.common.apex_utils import ApexUtils
from archai.common.config import Config
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.utils.metrics import Metrics
logger = get_global_logger()
class Tester(EnforceOverrides):
    """Evaluates a model over a test/validation DataLoader, collecting `Metrics`.

    Supports splitting each batch into chunks (`batch_chunks`) so large
    batches fit in GPU memory, and pre/post step hooks for subclasses.
    """

    def __init__(self, conf_val:Config, model:nn.Module, apex:ApexUtils)->None:
        self._title = conf_val['title']
        self._logger_freq = conf_val['logger_freq']
        conf_lossfn = conf_val['lossfn']
        self.batch_chunks = conf_val['batch_chunks']

        self._apex = apex
        self.model = model
        # Loss is moved to the same device apex manages
        self._lossfn = ml_utils.get_lossfn(conf_lossfn).to(apex.device)
        self._metrics = None

    def test(self, test_dl: DataLoader)->Metrics:
        """Run one full evaluation pass and return the collected metrics."""
        logger.pushd(self._title)

        self._metrics = self._create_metrics()
        # recreate metrics for this run
        self._pre_test()
        self._test_epoch(test_dl)
        self._post_test()

        logger.popd()
        return self.get_metrics() # type: ignore

    def _test_epoch(self, test_dl: DataLoader)->None:
        """Evaluate all batches under no_grad, chunking each batch if configured."""
        self._metrics.pre_epoch()
        self.model.eval()
        steps = len(test_dl)

        with torch.no_grad(), logger.pushd('steps'):
            for step, (x, y) in enumerate(test_dl):
                # derived class might alter the mode through pre/post hooks
                assert not self.model.training
                logger.pushd(step)

                self._pre_step(x, y, self._metrics) # pyright: ignore[reportGeneralTypeIssues]

                # divide batch in to chunks if needed so it fits in GPU RAM
                if self.batch_chunks > 1:
                    x_chunks, y_chunks = torch.chunk(x, self.batch_chunks), torch.chunk(y, self.batch_chunks)
                else:
                    x_chunks, y_chunks = (x,), (y,)

                logits_chunks = []
                loss_sum, loss_count = 0.0, 0
                for xc, yc in zip(x_chunks, y_chunks):
                    xc, yc = xc.to(self.get_device(), non_blocking=True), yc.to(self.get_device(), non_blocking=True)

                    logits_c = self.model(xc)
                    # Some models return (logits, aux_logits, ...); keep only logits
                    tupled_out = isinstance(logits_c, Tuple) and len(logits_c) >=2
                    if tupled_out:
                        logits_c = logits_c[0]
                    loss_c = self._lossfn(logits_c, yc)

                    # Accumulate sample-weighted loss so chunking doesn't skew the mean
                    loss_sum += loss_c.item() * len(logits_c)
                    loss_count += len(logits_c)
                    logits_chunks.append(logits_c.detach().cpu()) # pyright: ignore[reportGeneralTypeIssues]

                self._post_step(x, y,
                                ml_utils.join_chunks(logits_chunks),
                                torch.tensor(loss_sum/loss_count),
                                steps, self._metrics) # pyright: ignore[reportGeneralTypeIssues]

                # TODO: we possibly need to sync so all replicas are upto date
                self._apex.sync_devices()

                logger.popd()
        self._metrics.post_epoch() # no "val" dataset for the test phase

    def get_metrics(self)->Optional[Metrics]:
        """Return metrics of the last `test()` run (None before any run)."""
        return self._metrics

    def state_dict(self)->dict:
        return {
            'metrics': self._metrics.state_dict()
        }

    def get_device(self):
        """Device to evaluate on, as decided by the apex utilities."""
        return self._apex.device

    def load_state_dict(self, state_dict:dict)->None:
        self._metrics.load_state_dict(state_dict['metrics'])

    def _pre_test(self)->None:
        self._metrics.pre_run()

    def _post_test(self)->None:
        self._metrics.post_run()

    def _pre_step(self, x:Tensor, y:Tensor, metrics:Metrics)->None:
        # Hook: called before each batch; subclasses may override
        metrics.pre_step(x, y)

    def _post_step(self, x:Tensor, y:Tensor, logits:Tensor, loss:Tensor,
                   steps:int, metrics:Metrics)->None:
        # Hook: called after each batch with joined logits and mean loss
        metrics.post_step(x, y, logits, loss, steps)

    def _create_metrics(self)->Metrics:
        return Metrics(self._title, self._apex, logger_freq=self._logger_freq)
|
archai/archai/supergraph/utils/tester.py/0
|
{
"file_path": "archai/archai/supergraph/utils/tester.py",
"repo_id": "archai",
"token_count": 2054
}
| 319 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import itertools
import math
import os
import shutil
import sys
import time
from typing import Any, Dict, Iterator, Optional, Tuple
import torch
import torch.nn as nn
import torch.optim as optim
from overrides import overrides
from packaging import version
from torch.nn.parallel import DistributedDataParallel
from archai.api.trainer_base import TrainerBase
from archai.common.distributed_utils import all_reduce, sync_workers
from archai.common.ordered_dict_logger import OrderedDictLogger
from archai.datasets.nlp.nvidia_data_loader_utils import (
LMMultiFileIterator,
LMOrderedIterator,
)
from archai.datasets.nlp.nvidia_dataset_provider import NvidiaDatasetProvider
from archai.quantization.mixed_qat import MixedQAT
from archai.quantization.qat import prepare_with_qat, qat_to_float_modules
from archai.trainers.cyclic_cosine_scheduler import CyclicCosineDecayLR
from archai.trainers.lamb_optimizer import JITLamb, Lamb
from archai.trainers.nlp.nvidia_training_args import NvidiaTrainingArguments
logger = OrderedDictLogger(source=__name__)
def save_checkpoint(
    output_dir: str,
    model: torch.nn.Module,
    optimizer: torch.optim.Optimizer,
    scheduler: torch.optim.lr_scheduler._LRScheduler,
    scaler: torch.cuda.amp.GradScaler,
    trainer_state: Dict[str, Any],
    fp16: bool,
    prefix: Optional[str] = "",
    save_all_checkpoints: Optional[bool] = False,
    is_best_model: Optional[bool] = False,
) -> None:
    """Save a checkpoint that holds enough information to resume the training.

    The checkpoint contains the model's configuration and state, the optimizer's state,
    the scheduler's state, the scaler's state (if FP16 precision is used),
    and the trainer's state.

    If `is_best_model` is `True`, the function will also save a copy of the checkpoint
    with the prefix "checkpoint-best".

    If `save_all_checkpoints` is `True`, the function will also save a copy of the checkpoint
    with the step number in the file name.

    Args:
        output_dir: Folder where checkpoint should be saved.
        model: Instance of model.
        optimizer: Instance of optimizer.
        scheduler: Instance of scheduler.
        scaler: Instance of scaler.
        trainer_state: Current trainer state.
        fp16: Whether fp16 precision is used or not.
        prefix: Prefix which should be added to the checkpoint's file name.
        save_all_checkpoints: Whether all `eval_steps` steps should be saved.
        is_best_model: Whether best model should be saved.
    """
    # Scheduler/scaler states are stored as None when absent/disabled
    state = {
        "model_config": model.config,
        "model_state": model.state_dict(),
        "optimizer_state": optimizer.state_dict(),
        "scheduler_state": scheduler.state_dict() if scheduler else None,
        "scaler_state": scaler.state_dict() if fp16 else None,
        "trainer_state": trainer_state,
    }
    checkpoint_name = prefix + "checkpoint-last.pt"
    # Only rank 0 writes; other workers wait at the synchronization barrier
    with sync_workers() as rank:
        checkpoint_path = os.path.join(output_dir, checkpoint_name)
        if rank == 0:
            logger.info(f"Saving checkpoint: {checkpoint_path}")
            torch.save(state, checkpoint_path)
            # "best"/step-numbered variants are cheap copies of the file just written
            if is_best_model:
                checkpoint_step_name = prefix + "checkpoint-best.pt"
                checkpoint_step_path = os.path.join(output_dir, checkpoint_step_name)
                logger.info(f"Saving checkpoint: {checkpoint_step_path}")
                shutil.copy(checkpoint_path, checkpoint_step_path)
            if save_all_checkpoints:
                checkpoint_step_name = prefix + f"checkpoint-{trainer_state['step']}.pt"
                checkpoint_step_path = os.path.join(output_dir, checkpoint_step_name)
                logger.info(f"Saving checkpoint: {checkpoint_step_path}")
                shutil.copy(checkpoint_path, checkpoint_step_path)
class NvidiaTrainer(TrainerBase):
"""NVIDIA-based trainer."""
def __init__(
self,
model: torch.nn.Module,
args: Optional[NvidiaTrainingArguments] = None,
) -> None:
"""Initialize by verifying the model and training arguments, and loading dataset.
Args:
model: Model to be trained or evaluated.
args: NVIDIA-based training arguments. If not provided, a default instance
of `NvidiaTrainingArguments` will be used.
"""
assert isinstance(model, torch.nn.Module), "`model` should be an instance of `torch.nn.Module`."
self.model = model
if args is None:
args = NvidiaTrainingArguments("tmp_trainer")
assert isinstance(args, NvidiaTrainingArguments), "`args` should be an instance of `NvidiaTrainingArguments`."
self.args = args
self.dataset_provider = NvidiaDatasetProvider(
dataset_name=self.args.dataset_name,
dataset_dir=self.args.dataset_dir,
cache_dir=self.args.dataset_cache_dir,
vocab_type=self.args.vocab_type,
vocab_size=self.args.vocab_size,
refresh_cache=self.args.dataset_refresh_cache,
)
self.model.to(self.args.device)
self.trainer_state = {
"iterator": 0,
"epoch": 0,
"batch": 0,
"step": 0,
"best_eval_loss": 1e300,
"log_history": [],
}
def load_checkpoint(self, checkpoint_file_path: str) -> Tuple[int, int, int, int]:
"""Load states from a checkpoint file.
Args:
checkpoint_file_path: Path to the checkpoint file.
Returns:
Current iterator, epoch, batch, and step values.
"""
try:
checkpoint = torch.load(checkpoint_file_path, map_location=self.args.device)
self.model.load_state_dict(checkpoint["model_state"])
self.optimizer.load_state_dict(checkpoint["optimizer_state"])
self.scheduler.load_state_dict(checkpoint["scheduler_state"])
if self.args.fp16:
self.scaler.load_state_dict(checkpoint["amp_state"])
self.trainer_state = checkpoint["trainer_state"]
iterator = self.trainer_state["iterator"]
start_epoch = self.trainer_state["epoch"]
start_batch = self.trainer_state["batch"]
step = self.trainer_state["step"]
return iterator, start_epoch, start_batch, step
except FileNotFoundError:
return 0, 0, 0, 0
def _get_dataloader(self, split: str) -> Iterator:
if split == "train":
input_ids = self.dataset_provider.get_train_dataset()
elif split == "valid":
input_ids = self.dataset_provider.get_val_dataset()
elif split == "test":
input_ids = self.dataset_provider.get_test_dataset()
else:
raise RuntimeError(f"Split: {split} is not supported yet.")
if self.args.dataset_name in ["wt2", "wt103"] or self.args.dataset_name.startswith("olx_"):
return LMOrderedIterator(
input_ids,
self.args.global_batch_size,
self.args.seq_len,
device=self.args.device,
)
elif self.args.dataset_name == "lm1b":
return LMMultiFileIterator(
input_ids,
self.vocab,
self.args.global_batch_size,
self.args.seq_len,
device=self.args.device,
)
else:
raise RuntimeError(f"Dataset: {self.args.dataset_name} is not supported yet.")
def _create_optimizer(self) -> None:
optimizer_name = self.args.optim.lower()
if optimizer_name == "sgd":
self.optimizer = optim.SGD(self.model.parameters(), lr=self.args.learning_rate, momentum=self.args.momentum)
elif optimizer_name == "adam":
self.optimizer = optim.Adam(
self.model.parameters(), lr=self.args.learning_rate, weight_decay=self.args.weight_decay
)
elif optimizer_name == "adagrad":
self.optimizer = optim.Adagrad(self.model.parameters(), lr=self.args.learning_rate)
elif optimizer_name == "lamb":
self.optimizer = Lamb(
self.model.parameters(), lr=self.args.learning_rate, weight_decay=self.args.weight_decay
)
elif optimizer_name == "jitlamb":
self.optimizer = JITLamb(
self.model.parameters(), lr=self.args.learning_rate, weight_decay=self.args.weight_decay
)
else:
raise NotImplementedError(f"Optimizer: {self.args.optim} is not implemented yet.")
def _create_scaler(self) -> None:
self.scaler = None
if self.args.fp16:
self.scaler = torch.cuda.amp.GradScaler()
def _create_scheduler(self) -> None:
scheduler_name = self.args.lr_qat_scheduler_type if self.args.qat else self.args.lr_scheduler_type
if scheduler_name == "cosine":
if self.args.lr_scheduler_max_steps:
max_steps = self.args.lr_scheduler_max_steps
else:
max_steps = self.args.max_steps
self.scheduler = optim.lr_scheduler.CosineAnnealingLR(
self.optimizer, max_steps - self.args.lr_scheduler_warmup_steps, eta_min=self.args.lr_scheduler_min_lr
)
elif scheduler_name == "inv_sqrt":
def lr_lambda(step: int) -> float:
if step == 0 and self.args.lr_scheduler_warmup_steps == 0:
return 1.0
else:
return (
1.0 / (step**0.5)
if step > self.args.lr_scheduler_warmup_steps
else step / (self.args.lr_scheduler_warmup_steps**1.5)
)
self.scheduler = optim.lr_scheduler.LambdaLR(self.optimizer, lr_lambda=lr_lambda)
elif scheduler_name == "cyclic_cosine":
init_decay_steps = int((self.args.max_step - self.args.lr_scheduler_warmup_steps) / 2)
restart_interval = int((self.args.max_step - self.args.lr_scheduler_warmup_steps) / 4)
self.scheduler = CyclicCosineDecayLR(
self.optimizer,
init_decay_steps,
self.args.lr_scheduler_min_lr,
restart_interval,
warmup_epochs=self.args.lr_scheduler_warmup_steps,
warmup_start_lr=self.args.learning_rate * 0.01,
)
elif scheduler_name == "constant":
pass
def _setup_qat(self) -> None:
if self.args.qat:
prepare_with_qat(self.model, onnx_compatible=True)
if self.args.mixed_qat:
self.model = MixedQAT(self.model)
def _setup_distributed_training(self) -> None:
self.dist_model = self.model
if self.args.strategy == "ddp" and torch.distributed.is_initialized():
self.dist_model = DistributedDataParallel(
self.model,
device_ids=[self.args.local_rank],
output_device=self.args.local_rank,
broadcast_buffers=False,
find_unused_parameters=self.args.find_unused_parameters,
)
elif self.args.strategy == "dp":
self.dist_model = nn.DataParallel(self.model, dim=1)
def _training_step_chunk(
self, input_ids: torch.LongTensor, labels: torch.LongTensor, autocast: torch.autocast
) -> float:
with autocast:
loss = self.dist_model(input_ids, labels=input_ids)[0]
loss = loss.float().mean().type_as(loss) / self.args.gradient_accumulation_steps
if self.args.fp16:
self.scaler.scale(loss).backward()
else:
loss.backward()
return loss.float().item()
def _training_step(
self,
train_dataloader: Iterator,
eval_dataloader: Iterator,
iterator: int,
epoch: int,
start_batch: int,
step: int,
) -> None:
self.model.train()
train_loss, log_step, n_labels_tokens = 0.0, 0, 0
best_eval_loss = self.trainer_state["best_eval_loss"]
start_time = time.time()
# `lm1b` uses a different style of data loader
if self.args.dataset_name != "lm1b":
train_iterator = train_dataloader.get_fixlen_iter(start=iterator)
else:
train_iterator = train_dataloader
# Support `bf16` based on PyTorch version and CUDA availability
autocast = torch.autocast(self.args.device.type, enabled=self.args.fp16)
if version.parse(torch.__version__) >= version.parse("1.10") and self.args.device.type != "cpu":
dtype = torch.bfloat16 if torch.cuda.is_bf16_supported() else torch.float16
autocast = torch.cuda.amp.autocast(enabled=self.args.fp16, dtype=dtype)
for batch, (input_ids, labels, _, _) in enumerate(train_iterator, start=start_batch + 1):
log_step += 1
n_labels_tokens += labels.numel()
for param in self.model.parameters():
param.grad = None
# Split into chunks for gradient accumulation
input_ids_chunks = torch.chunk(input_ids, self.args.gradient_accumulation_steps, 0)
labels_chunks = torch.chunk(labels, self.args.gradient_accumulation_steps, 0)
for i in range(self.args.gradient_accumulation_steps):
input_ids_chunk = input_ids_chunks[i].contiguous()
labels_chunk = labels_chunks[i].contiguous()
train_loss_chunk = self._training_step_chunk(
input_ids_chunk,
labels_chunk,
autocast,
)
train_loss += train_loss_chunk
if self.args.fp16:
self.scaler.unscale_(self.optimizer)
torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.args.max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.args.max_grad_norm)
if self.args.fp16:
self.scaler.step(self.optimizer)
self.scaler.update()
else:
self.optimizer.step()
# Learning rate annealing
step += 1
if self.args.lr_scheduler_type in ["cosine", "constant"]:
if step < self.args.lr_scheduler_warmup_steps:
curr_lr = self.args.learning_rate * step / self.args.lr_scheduler_warmup_steps
self.optimizer.param_groups[0]["lr"] = curr_lr
else:
if self.args.lr_scheduler_type == "cosine":
self.scheduler.step(step - self.args.lr_scheduler_warmup_steps)
elif self.args.lr_scheduler_type in ["inv_sqrt", "cyclic_cosine"]:
self.scheduler.step(step)
# Logging
if step % self.args.logging_steps == 0:
elapsed_time = time.time() - start_time
lr = self.optimizer.param_groups[0]["lr"]
loss = train_loss / log_step
loss = all_reduce(loss, op="mean")
batch_time = elapsed_time / log_step
batch_time = all_reduce(batch_time, op="max")
throughput = n_labels_tokens / elapsed_time
throughput = all_reduce(throughput, op="sum")
train_loss, log_step, n_labels_tokens = 0.0, 0, 0
self.trainer_state["log_history"].append(
{
"epoch": epoch,
"learning_rate": lr,
"loss": loss,
"ppl": math.exp(loss),
"step": step,
}
)
logger.info(
f"Epoch: {epoch} | Step: {step} | "
f"Batch: {batch} / {train_dataloader.n_batch} | LR: {lr:.3e} | "
f"ms/batch: {batch_time*1000:.1f} | tok/s: {throughput:.0f} | "
f"Loss: {loss:.3f} | PPL: {math.exp(loss):.3f}"
)
start_time = time.time()
do_periodic_eval = step % self.args.eval_steps == 0
is_final_step = step == self.args.max_steps
# Evaluation and checkpoint
if (do_periodic_eval or is_final_step) and self.args.do_eval:
eval_loss, eval_time = self._evaluation_step(eval_dataloader)
eval_loss = all_reduce(eval_loss, op="mean")
self.trainer_state["log_history"].append(
{
"epoch": epoch,
"eval_idx": (step // self.args.eval_steps) - 1,
"eval_runtime": eval_time,
"eval_loss": eval_loss,
"eval_ppl": math.exp(eval_loss),
"step": step,
}
)
logger.info(
f"Eval: {(step // self.args.eval_steps) - 1} | "
f"Step: {step} | Time: {eval_time:.2f}s | "
f"Loss: {eval_loss:.3f} | PPL: {math.exp(eval_loss):.3f}"
)
iterator = train_dataloader.last_iter
save_model = copy.deepcopy(self.model)
prefix = ""
self.trainer_state["iterator"] = iterator
self.trainer_state["epoch"] = epoch
self.trainer_state["batch"] = batch
self.trainer_state["step"] = step
# Model needs to be converted back to FP32 when using QAT
if self.args.qat:
qat_to_float_modules(save_model)
prefix = "qat-"
# Save original FP32 model when using MixedQAT
if self.args.mixed_qat:
save_model = save_model.model
prefix = "mixed-qat-"
# Check if current model is the best one
is_best_model = eval_loss < best_eval_loss
if is_best_model:
best_eval_loss = eval_loss
self.trainer_state["best_eval_loss"] = best_eval_loss
save_checkpoint(
self.args.output_dir,
save_model,
self.optimizer,
self.scheduler,
self.scaler,
self.trainer_state,
self.args.fp16,
prefix=prefix,
save_all_checkpoints=self.args.save_all_checkpoints,
is_best_model=is_best_model,
)
if is_final_step:
break
return step
@overrides
def train(self, checkpoint_file_path: Optional[str] = "") -> Dict[str, Any]:
"""Train a model.
Args:
checkpoint_file_path: Path to the checkpoint that will be used
to resume the training.
Returns:
Training-related metrics.
"""
self._create_optimizer()
self._create_scaler()
self._create_scheduler()
if checkpoint_file_path:
iterator, start_epoch, start_batch, step = self.load_checkpoint(checkpoint_file_path)
else:
iterator, start_epoch, start_batch, step = 0, 0, 0, 0
if step >= self.args.max_steps:
sys.exit(1)
self._setup_qat()
self._setup_distributed_training()
train_dataloader = self._get_dataloader("train")
eval_dataloader = self._get_dataloader("valid")
logger.info("Starting training ...")
logger.debug(f"Training arguments: {self.args.to_dict()}")
start_time = time.time()
try:
for epoch in itertools.count(start=start_epoch):
if self.args.iterator_roll:
train_dataloader.roll(seed=self.args.seed + epoch)
step = self._training_step(train_dataloader, eval_dataloader, iterator, epoch, start_batch, step)
iterator, start_batch = 0, 0
if step == self.args.max_steps:
logger.info("End of training ...")
break
except KeyboardInterrupt:
logger.info("Exiting from training ...")
end_time = time.time()
train_time = end_time - start_time
logger.info(f"Training time: {train_time:.3f} seconds")
def _evaluation_step(self, eval_dataloader: Iterator) -> Tuple[float, float]:
self.model.eval()
eval_loss, n_tokens = 0.0, 0
start_time = time.time()
with torch.no_grad():
for _, (input_ids, _, _, warm) in enumerate(eval_dataloader):
loss = self.model(input_ids, labels=input_ids)[0]
tokens = input_ids.numel()
if warm:
eval_loss += tokens * loss.float().mean().item()
n_tokens += tokens
eval_loss /= n_tokens
end_time = time.time()
self.model.train()
return eval_loss, end_time - start_time
@overrides
def evaluate(self, eval_dataloader: Optional[Iterator] = None) -> Dict[str, Any]:
"""Evaluate a model.
Args:
eval_dataloader: Evaluation-based data loader. If not supplied, it will
default to the one available in pre-loaded dataset.
Returns:
Evaluation-related metrics.
"""
if not eval_dataloader:
eval_dataloader = self._get_dataloader("test")
eval_loss, eval_time = self._evaluation_step(eval_dataloader)
eval_metrics = {
"eval_time": eval_time,
"eval_loss": eval_loss,
"eval_ppl": math.exp(eval_loss),
"eval_bpc": eval_loss / math.log(2),
}
return eval_metrics
@overrides
def predict(self) -> None:
"""Predict with a model."""
raise NotImplementedError
def fine_tune_qat(self, model: Optional[torch.nn.Module] = None, checkpoint_file_path: Optional[str] = "") -> None:
"""Fine-tune a model with QAT.
Users are allowed to pass in a different model (e.g., without dropout) than the one
instantiated with `NvidiaTrainer`, as well as a pre-trained checkpoint file to load
the weights from a previous training.
Args:
model: Model to be fine-tuned.
checkpoint_file_path: Path to the checkpoint used to resume training.
"""
if model:
assert isinstance(model, torch.nn.Module), "`model` should be an instance of `torch.nn.Module`."
self.model = model.to(self.args.device)
# QAT-based arguments
self.args.max_steps = 10000
self.args.eval_steps = 1000
self.args.optim = "adam"
self.args.learning_rate /= 100
self.args.lr_scheduler_min_lr /= 100
self.args.lr_scheduler_warmup_steps = 1000
self.args.qat = True
self.args.mixed_qat = False
# Re-load the checkpoint and perform the fine-tuning
self.load_checkpoint(checkpoint_file_path)
self.train()
|
archai/archai/trainers/nlp/nvidia_trainer.py/0
|
{
"file_path": "archai/archai/trainers/nlp/nvidia_trainer.py",
"repo_id": "archai",
"token_count": 11485
}
| 320 |
# AutoAugment training configuration — presumably WideResNet-40-2 on CIFAR-10
# with batch size 512 (per the file name); verify against the training script.
autoaug:
  model:
    type: 'wresnet40_2'
  loader:
    aug: 'fa_reduced_cifar10'  # augmentation policy name resolved by the loader
    cutout: 16  # cutout patch size (pixels)
    batch: 512
    epochs: 200
    lr_schedule:
      type: 'cosine'
      warmup:  # linear warmup before handing over to the cosine schedule
        multiplier: 4
        epochs: 5
    optimizer:
      lr: 0.1
      type: 'sgd'
      nesterov: True
      decay: 0.0002  # weight decay
|
archai/confs/aug/wresnet40x2_cifar10_b512.yaml/0
|
{
"file_path": "archai/confs/aug/wresnet40x2_cifar10_b512.yaml",
"repo_id": "archai",
"token_count": 153
}
| 321 |
__include__: './size_224x224_base.yaml' # default dataset settings are for cifar
common:
seed: 0.0
toy_mode: # this section will be used by toy.yaml to setup the toy mode
max_batches: 25
train_batch: 64
test_batch: 64
# we use imagenet only for eval, so search dataset is still cifar10 but eval dataset is imagenet
dataset_eval:
name: 'imagenet'
n_classes: 1000
channels: 3 # number of channels in image
storage_name: 'ImageNet' # name of folder or tar file to copy from cloud storage
max_batches: -1 # if >= 0 then only these many batches are generated (useful for debugging)
nas:
eval:
model_desc:
n_cells: 14 # number of cells
aux_tower_stride: 2 # stride that aux tower should use, 3 is good for 32x32 images, 2 for imagenet
dataset:
_copy: '/dataset_eval'
model_post_op: 'pool_avg2d7x7'
model_stems:
ops: ['stem_conv3x3_s4', 'stem_conv3x3_s4s2']
init_node_ch: 48 # num of input/output channels for nodes in 1st cell
stem_multiplier: 1 # output channels multiplier for the stem
# darts setup
# loader:
# batch: 128
# dataset:
# _copy: '/dataset_eval'
# trainer:
    #   apex: # this is overridden in search and eval individually
# enabled: False # global switch to disable everything apex
# distributed_enabled: False # enable/disable distributed mode
# aux_weight: 0.4 # weight for loss from auxiliary towers in test time arch
# drop_path_prob: 0.0 # probability that given edge will be dropped
# epochs: 250
# lossfn: # TODO: this is perhaps reversed for test/train?
# type: 'CrossEntropyLabelSmooth'
# smoothing: 0.1 # label smoothing
# optimizer:
# lr: 0.1 # init learning rate
# decay: 3.0e-5
# lr_schedule:
# type: 'step'
# decay_period: 1 # epochs between two learning rate decays
# gamma: 0.97 # learning rate decay
# NVidia benchmark setup DGX1_RN50_AMP_90E.sh
# Enable amp and distributed 8 GPUs in apex section
loader:
batch: 256
train_workers: 5
test_workers: 5
dataset:
_copy: '/dataset_eval'
trainer:
apex:
enabled: True # global switch to disable everything apex
distributed_enabled: True # enable/disable distributed mode
        loss_scale: "128.0" # loss scaling mode for mixed prec, must be a string representing a float or "dynamic"
aux_weight: 0.0 # weight for loss from auxiliary towers in test time arch
drop_path_prob: 0.0 # probability that given edge will be dropped
epochs: 250
lossfn: # TODO: this is perhaps reversed for test/train?
type: 'CrossEntropyLabelSmooth'
smoothing: 0.1 # label smoothing
optimizer:
lr: 2.048 # init learning rate
decay: 3.05e-5
decay_bn: .NaN # if .NaN then same as decay otherwise apply different decay to BN layers
momentum: 0.875 # pytorch default is 0.0
lr_schedule:
type: 'cosine'
        min_lr: 0.0 # min learning rate to set in the eta_min param of the scheduler
warmup: # increases LR for 0 to current in specified epochs and then hands over to main scheduler
multiplier: 1.0
epochs: 8
|
archai/confs/datasets/imagenet.yaml/0
|
{
"file_path": "archai/confs/datasets/imagenet.yaml",
"repo_id": "archai",
"token_count": 1279
}
| 322 |
#!/bin/bash
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# Runs an interactive bash within the container
# Enhanced security by gVisor / without GPUs
#   --runtime=runsc       -> use the gVisor sandboxed runtime
#   --shm-size / --ipc / --ulimit -> enlarge shared memory and lift lock/stack
#                            limits (typically needed by DL data loaders)
#   NCCL_P2P_LEVEL=NVL    -> restrict NCCL peer-to-peer to NVLink-connected devices
docker run --rm \
    --runtime=runsc \
    --name nvidia22.10-archai \
    --shm-size=10g \
    --ipc=host \
    --ulimit memlock=-1 \
    --ulimit stack=67108864 \
    -e NCCL_P2P_LEVEL=NVL \
    -it nvidia22.10-archai:latest
|
archai/docker/run_container_with_gvisor.sh/0
|
{
"file_path": "archai/docker/run_container_with_gvisor.sh",
"repo_id": "archai",
"token_count": 161
}
| 323 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.