# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import time
from collections import OrderedDict
from options.train_options import TrainOptions
from data.data_loader import CreateDataLoader
from models.mapping_model import Pix2PixHDModel_Mapping
import util.util as util
from util.visualizer import Visualizer
import os
import numpy as np
import torch
import torchvision.utils as vutils
from torch.autograd import Variable
import datetime
import random
opt = TrainOptions().parse()
visualizer = Visualizer(opt)
iter_path = os.path.join(opt.checkpoints_dir, opt.name, 'iter.txt')
if opt.continue_train:
try:
        start_epoch, epoch_iter = np.loadtxt(iter_path, delimiter=',', dtype=int)
    except (OSError, ValueError):
start_epoch, epoch_iter = 1, 0
visualizer.print_save('Resuming from epoch %d at iteration %d' % (start_epoch-1, epoch_iter))
else:
start_epoch, epoch_iter = 1, 0
if opt.which_epoch != "latest":
start_epoch=int(opt.which_epoch)
visualizer.print_save('Notice : Resuming from epoch %d at iteration %d' % (start_epoch - 1, epoch_iter))
opt.start_epoch=start_epoch
### temp for continue train unfixed decoder
data_loader = CreateDataLoader(opt)
dataset = data_loader.load_data()
dataset_size = len(dataset) * opt.batchSize
print('#training images = %d' % dataset_size)
model = Pix2PixHDModel_Mapping()
model.initialize(opt)
path = os.path.join(opt.checkpoints_dir, opt.name, 'model.txt')
fd = open(path, 'w')
if opt.use_skip_model:
fd.write(str(model.mapping_net))
fd.close()
else:
fd.write(str(model.netG_A))
fd.write(str(model.mapping_net))
fd.close()
if opt.isTrain and len(opt.gpu_ids) > 1:
model = torch.nn.DataParallel(model, device_ids=opt.gpu_ids)
total_steps = (start_epoch-1) * dataset_size + epoch_iter
display_delta = total_steps % opt.display_freq
print_delta = total_steps % opt.print_freq
save_delta = total_steps % opt.save_latest_freq
### used for recovering training
for epoch in range(start_epoch, opt.niter + opt.niter_decay + 1):
epoch_s_t=datetime.datetime.now()
epoch_start_time = time.time()
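    # On every epoch after the resumed one, the modulo below wraps the intra-epoch
    # counter back to zero; the resumed epoch itself keeps the offset from iter.txt.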
if epoch != start_epoch:
epoch_iter = epoch_iter % dataset_size
for i, data in enumerate(dataset, start=epoch_iter):
iter_start_time = time.time()
total_steps += opt.batchSize
epoch_iter += opt.batchSize
# whether to collect output images
save_fake = total_steps % opt.display_freq == display_delta
############## Forward Pass ######################
#print(pair)
losses, generated = model(Variable(data['label']), Variable(data['inst']),
Variable(data['image']), Variable(data['feat']), infer=save_fake)
# sum per device losses
losses = [ torch.mean(x) if not isinstance(x, int) else x for x in losses ]
loss_dict = dict(zip(model.module.loss_names, losses))
# calculate final loss scalar
loss_D = (loss_dict['D_fake'] + loss_dict['D_real']) * 0.5
        loss_G = loss_dict['G_GAN'] + loss_dict.get('G_GAN_Feat', 0) + loss_dict.get('G_VGG', 0) + loss_dict.get('G_Feat_L2', 0) + loss_dict.get('Smooth_L1', 0) + loss_dict.get('G_Feat_L2_Stage_1', 0)
#loss_G = loss_dict['G_Feat_L2']
############### Backward Pass ####################
# update generator weights
model.module.optimizer_mapping.zero_grad()
loss_G.backward()
model.module.optimizer_mapping.step()
# update discriminator weights
model.module.optimizer_D.zero_grad()
loss_D.backward()
model.module.optimizer_D.step()
############## Display results and errors ##########
### print out errors
if i == 0 or total_steps % opt.print_freq == print_delta:
errors = {k: v.data if not isinstance(v, int) else v for k, v in loss_dict.items()}
t = (time.time() - iter_start_time) / opt.batchSize
            visualizer.print_current_errors(epoch, epoch_iter, errors, t, model.module.old_lr)
visualizer.plot_current_errors(errors, total_steps)
### display output images
if save_fake:
if not os.path.exists(opt.outputs_dir + opt.name):
os.makedirs(opt.outputs_dir + opt.name)
imgs_num = 5
            if opt.NL_use_mask:
                mask = data['inst'][:imgs_num]
                mask = mask.repeat(1, 3, 1, 1)
                imgs = torch.cat((data['label'][:imgs_num], mask, generated.data.cpu()[:imgs_num], data['image'][:imgs_num]), 0)
            else:
                imgs = torch.cat((data['label'][:imgs_num], generated.data.cpu()[:imgs_num], data['image'][:imgs_num]), 0)
            imgs = (imgs + 1.0) / 2.0  # de-normalize from [-1, 1] to [0, 1]
try:
image_grid = vutils.save_image(imgs, opt.outputs_dir + opt.name + '/' + str(epoch) + '_' + str(total_steps) + '.png',
nrow=imgs_num, padding=0, normalize=True)
except OSError as err:
print(err)
if epoch_iter >= dataset_size:
break
# end of epoch
epoch_e_t=datetime.datetime.now()
iter_end_time = time.time()
print('End of epoch %d / %d \t Time Taken: %s' %
(epoch, opt.niter + opt.niter_decay, str(epoch_e_t-epoch_s_t)))
### save model for this epoch
if epoch % opt.save_epoch_freq == 0:
print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps))
model.module.save('latest')
model.module.save(epoch)
np.savetxt(iter_path, (epoch+1, 0), delimiter=',', fmt='%d')
### instead of only training the local enhancer, train the entire network after certain iterations
if (opt.niter_fix_global != 0) and (epoch == opt.niter_fix_global):
model.module.update_fixed_params()
### linearly decay learning rate after certain iterations
if epoch > opt.niter:
model.module.update_learning_rate()
|
Bringing-Old-Photos-Back-to-Life/Global/train_mapping.py/0
|
{
"file_path": "Bringing-Old-Photos-Back-to-Life/Global/train_mapping.py",
"repo_id": "Bringing-Old-Photos-Back-to-Life",
"token_count": 2535
}
| 161 |
# TEXT ENCODER CONFIG
text_model: 'gpt2'
transformer_embed_dim: 768
freeze_text_encoder_weights: True
# AUDIO ENCODER CONFIG
audioenc_name: 'HTSAT'
out_emb: 768
sampling_rate: 44100
duration: 7
fmin: 50
fmax: 8000
n_fft: 1024
hop_size: 320
mel_bins: 64
window_size: 1024
# PROJECTION SPACE CONFIG
d_proj: 1024
temperature: 0.003
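# Note (assumption): temperature scales the contrastive logits CLIP-style;
# 0.003 corresponds to a logit scale of roughly 333 (1 / 0.003).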
# TRAINING AND EVALUATION CONFIG
batch_size: 128
num_classes: 527
# CLAPCAP CONFIG
clapcap_model: 'ClapCaption'
text_decoder: 'gpt2'
prefix_length: 40
prefix_length_clip: 40
mapping_type: 'transformer'
num_layers: 8
normalize_prefix: True
freeze_gpt_weights: True
|
CLAP/msclap/configs/config_clapcap.yml/0
|
{
"file_path": "CLAP/msclap/configs/config_clapcap.yml",
"repo_id": "CLAP",
"token_count": 240
}
| 162 |
.. role:: hidden
:class: hidden-section
.. _Criterions:
Criterions
==========
Criterions compute the loss function given the model and batch, roughly::
loss = criterion(model, batch)
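A minimal custom criterion might look like the following sketch (the
``my_cross_entropy`` name and the exact logging keys are illustrative, not
part of the documented API)::

    import torch.nn.functional as F

    from fairseq.criterions import FairseqCriterion, register_criterion

    @register_criterion('my_cross_entropy')
    class MyCrossEntropy(FairseqCriterion):

        def forward(self, model, sample, reduce=True):
            # Run the model on the mini-batch and compute token-level NLL.
            net_output = model(**sample['net_input'])
            lprobs = model.get_normalized_probs(net_output, log_probs=True)
            target = model.get_targets(sample, net_output)
            loss = F.nll_loss(
                lprobs.view(-1, lprobs.size(-1)),
                target.view(-1),
                ignore_index=self.padding_idx,  # padding positions do not contribute
                reduction='sum' if reduce else 'none',
            )
            sample_size = sample['ntokens']
            logging_output = {
                'loss': loss.data,
                'ntokens': sample['ntokens'],
                'sample_size': sample_size,
            }
            return loss, sample_size, logging_output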
.. automodule:: fairseq.criterions
:members:
.. autoclass:: fairseq.criterions.FairseqCriterion
:members:
:undoc-members:
.. autoclass:: fairseq.criterions.adaptive_loss.AdaptiveLoss
:members:
:undoc-members:
.. autoclass:: fairseq.criterions.composite_loss.CompositeLoss
:members:
:undoc-members:
.. autoclass:: fairseq.criterions.cross_entropy.CrossEntropyCriterion
:members:
:undoc-members:
.. autoclass:: fairseq.criterions.label_smoothed_cross_entropy.LabelSmoothedCrossEntropyCriterion
:members:
:undoc-members:
|
COCO-LM/fairseq/docs/criterions.rst/0
|
{
"file_path": "COCO-LM/fairseq/docs/criterions.rst",
"repo_id": "COCO-LM",
"token_count": 284
}
| 163 |
Tutorial: Classifying Names with a Character-Level RNN
======================================================
In this tutorial we will extend fairseq to support *classification* tasks. In
particular we will re-implement the PyTorch tutorial for `Classifying Names with
a Character-Level RNN <https://pytorch.org/tutorials/intermediate/char_rnn_classification_tutorial.html>`_
in fairseq. It is recommended to quickly skim that tutorial before beginning
this one.
This tutorial covers:
1. **Preprocessing the data** to create dictionaries.
2. **Registering a new Model** that encodes an input sentence with a simple RNN
and predicts the output label.
3. **Registering a new Task** that loads our dictionaries and dataset.
4. **Training the Model** using the existing command-line tools.
5. **Writing an evaluation script** that imports fairseq and allows us to
interactively evaluate our model on new inputs.
1. Preprocessing the data
-------------------------
The original tutorial provides raw data, but we'll work with a modified version
of the data that is already tokenized into characters and split into separate
train, valid and test sets.
Download and extract the data from here:
`tutorial_names.tar.gz <https://dl.fbaipublicfiles.com/fairseq/data/tutorial_names.tar.gz>`_
Once extracted, let's preprocess the data using the :ref:`fairseq-preprocess`
command-line tool to create the dictionaries. While this tool is primarily
intended for sequence-to-sequence problems, we're able to reuse it here by
treating the label as a "target" sequence of length 1. We'll also output the
preprocessed files in "raw" format using the ``--dataset-impl`` option to
enhance readability:
.. code-block:: console
> fairseq-preprocess \
--trainpref names/train --validpref names/valid --testpref names/test \
--source-lang input --target-lang label \
--destdir names-bin --dataset-impl raw
After running the above command you should see a new directory,
:file:`names-bin/`, containing the dictionaries for *inputs* and *labels*.
2. Registering a new Model
--------------------------
Next we'll register a new model in fairseq that will encode an input sentence
with a simple RNN and predict the output label. Compared to the original PyTorch
tutorial, our version will also work with batches of data and GPU Tensors.
First let's copy the simple RNN module implemented in the `PyTorch tutorial
<https://pytorch.org/tutorials/intermediate/char_rnn_classification_tutorial.html#creating-the-network>`_.
Create a new file named :file:`fairseq/models/rnn_classifier.py` with the
following contents::
import torch
import torch.nn as nn
class RNN(nn.Module):
def __init__(self, input_size, hidden_size, output_size):
super(RNN, self).__init__()
self.hidden_size = hidden_size
self.i2h = nn.Linear(input_size + hidden_size, hidden_size)
self.i2o = nn.Linear(input_size + hidden_size, output_size)
self.softmax = nn.LogSoftmax(dim=1)
def forward(self, input, hidden):
combined = torch.cat((input, hidden), 1)
hidden = self.i2h(combined)
output = self.i2o(combined)
output = self.softmax(output)
return output, hidden
def initHidden(self):
return torch.zeros(1, self.hidden_size)
We must also *register* this model with fairseq using the
:func:`~fairseq.models.register_model` function decorator. Once the model is
registered we'll be able to use it with the existing :ref:`Command-line Tools`.
All registered models must implement the :class:`~fairseq.models.BaseFairseqModel`
interface, so we'll create a small wrapper class in the same file and register
it in fairseq with the name ``'rnn_classifier'``::
from fairseq.models import BaseFairseqModel, register_model
# Note: the register_model "decorator" should immediately precede the
# definition of the Model class.
@register_model('rnn_classifier')
class FairseqRNNClassifier(BaseFairseqModel):
@staticmethod
def add_args(parser):
# Models can override this method to add new command-line arguments.
# Here we'll add a new command-line argument to configure the
# dimensionality of the hidden state.
parser.add_argument(
'--hidden-dim', type=int, metavar='N',
help='dimensionality of the hidden state',
)
@classmethod
def build_model(cls, args, task):
# Fairseq initializes models by calling the ``build_model()``
# function. This provides more flexibility, since the returned model
# instance can be of a different type than the one that was called.
# In this case we'll just return a FairseqRNNClassifier instance.
# Initialize our RNN module
rnn = RNN(
# We'll define the Task in the next section, but for now just
# notice that the task holds the dictionaries for the "source"
# (i.e., the input sentence) and "target" (i.e., the label).
input_size=len(task.source_dictionary),
hidden_size=args.hidden_dim,
output_size=len(task.target_dictionary),
)
# Return the wrapped version of the module
return FairseqRNNClassifier(
rnn=rnn,
input_vocab=task.source_dictionary,
)
def __init__(self, rnn, input_vocab):
super(FairseqRNNClassifier, self).__init__()
self.rnn = rnn
self.input_vocab = input_vocab
# The RNN module in the tutorial expects one-hot inputs, so we can
# precompute the identity matrix to help convert from indices to
# one-hot vectors. We register it as a buffer so that it is moved to
# the GPU when ``cuda()`` is called.
self.register_buffer('one_hot_inputs', torch.eye(len(input_vocab)))
def forward(self, src_tokens, src_lengths):
# The inputs to the ``forward()`` function are determined by the
# Task, and in particular the ``'net_input'`` key in each
# mini-batch. We'll define the Task in the next section, but for
# now just know that *src_tokens* has shape `(batch, src_len)` and
# *src_lengths* has shape `(batch)`.
bsz, max_src_len = src_tokens.size()
# Initialize the RNN hidden state. Compared to the original PyTorch
# tutorial we'll also handle batched inputs and work on the GPU.
hidden = self.rnn.initHidden()
hidden = hidden.repeat(bsz, 1) # expand for batched inputs
hidden = hidden.to(src_tokens.device) # move to GPU
for i in range(max_src_len):
# WARNING: The inputs have padding, so we should mask those
# elements here so that padding doesn't affect the results.
# This is left as an exercise for the reader. The padding symbol
# is given by ``self.input_vocab.pad()`` and the unpadded length
# of each input is given by *src_lengths*.
# One-hot encode a batch of input characters.
input = self.one_hot_inputs[src_tokens[:, i].long()]
# Feed the input to our RNN.
output, hidden = self.rnn(input, hidden)
# Return the final output state for making a prediction
return output
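As an aside, one possible way to implement the padding mask mentioned in the
comment above (a sketch, not part of the original tutorial) is to freeze the
hidden state at padded positions inside the time-step loop::

    # Compute the RNN update, then keep the old hidden state wherever
    # this time step is padding for that batch element.
    output, new_hidden = self.rnn(input, hidden)
    not_pad = src_tokens[:, i].ne(self.input_vocab.pad())  # (batch,) bools
    mask = not_pad.unsqueeze(1).type_as(new_hidden)        # (batch, 1) floats
    hidden = mask * new_hidden + (1 - mask) * hidden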
Finally let's define a *named architecture* with the configuration for our
model. This is done with the :func:`~fairseq.models.register_model_architecture`
function decorator. Thereafter this named architecture can be used with the
``--arch`` command-line argument, e.g., ``--arch pytorch_tutorial_rnn``::
from fairseq.models import register_model_architecture
# The first argument to ``register_model_architecture()`` should be the name
# of the model we registered above (i.e., 'rnn_classifier'). The function we
# register here should take a single argument *args* and modify it in-place
# to match the desired architecture.
@register_model_architecture('rnn_classifier', 'pytorch_tutorial_rnn')
def pytorch_tutorial_rnn(args):
# We use ``getattr()`` to prioritize arguments that are explicitly given
# on the command-line, so that the defaults defined below are only used
# when no other value has been specified.
args.hidden_dim = getattr(args, 'hidden_dim', 128)
3. Registering a new Task
-------------------------
Now we'll register a new :class:`~fairseq.tasks.FairseqTask` that will load our
dictionaries and dataset. Tasks can also control how the data is batched into
mini-batches, but in this tutorial we'll reuse the batching provided by
:class:`fairseq.data.LanguagePairDataset`.
Create a new file named :file:`fairseq/tasks/simple_classification.py` with the
following contents::
import os
import torch
from fairseq.data import Dictionary, LanguagePairDataset
from fairseq.tasks import LegacyFairseqTask, register_task
@register_task('simple_classification')
class SimpleClassificationTask(LegacyFairseqTask):
@staticmethod
def add_args(parser):
# Add some command-line arguments for specifying where the data is
# located and the maximum supported input length.
parser.add_argument('data', metavar='FILE',
help='file prefix for data')
parser.add_argument('--max-positions', default=1024, type=int,
help='max input length')
@classmethod
def setup_task(cls, args, **kwargs):
# Here we can perform any setup required for the task. This may include
# loading Dictionaries, initializing shared Embedding layers, etc.
# In this case we'll just load the Dictionaries.
input_vocab = Dictionary.load(os.path.join(args.data, 'dict.input.txt'))
label_vocab = Dictionary.load(os.path.join(args.data, 'dict.label.txt'))
print('| [input] dictionary: {} types'.format(len(input_vocab)))
print('| [label] dictionary: {} types'.format(len(label_vocab)))
return SimpleClassificationTask(args, input_vocab, label_vocab)
def __init__(self, args, input_vocab, label_vocab):
super().__init__(args)
self.input_vocab = input_vocab
self.label_vocab = label_vocab
def load_dataset(self, split, **kwargs):
"""Load a given dataset split (e.g., train, valid, test)."""
prefix = os.path.join(self.args.data, '{}.input-label'.format(split))
# Read input sentences.
sentences, lengths = [], []
with open(prefix + '.input', encoding='utf-8') as file:
for line in file:
sentence = line.strip()
# Tokenize the sentence, splitting on spaces
tokens = self.input_vocab.encode_line(
sentence, add_if_not_exist=False,
)
sentences.append(tokens)
lengths.append(tokens.numel())
# Read labels.
labels = []
with open(prefix + '.label', encoding='utf-8') as file:
for line in file:
label = line.strip()
labels.append(
# Convert label to a numeric ID.
torch.LongTensor([self.label_vocab.add_symbol(label)])
)
assert len(sentences) == len(labels)
print('| {} {} {} examples'.format(self.args.data, split, len(sentences)))
# We reuse LanguagePairDataset since classification can be modeled as a
# sequence-to-sequence task where the target sequence has length 1.
self.datasets[split] = LanguagePairDataset(
src=sentences,
src_sizes=lengths,
src_dict=self.input_vocab,
tgt=labels,
tgt_sizes=torch.ones(len(labels)), # targets have length 1
tgt_dict=self.label_vocab,
left_pad_source=False,
# Since our target is a single class label, there's no need for
# teacher forcing. If we set this to ``True`` then our Model's
# ``forward()`` method would receive an additional argument called
# *prev_output_tokens* that would contain a shifted version of the
# target sequence.
input_feeding=False,
)
def max_positions(self):
"""Return the max input length allowed by the task."""
# The source should be less than *args.max_positions* and the "target"
# has max length 1.
return (self.args.max_positions, 1)
@property
def source_dictionary(self):
"""Return the source :class:`~fairseq.data.Dictionary`."""
return self.input_vocab
@property
def target_dictionary(self):
"""Return the target :class:`~fairseq.data.Dictionary`."""
return self.label_vocab
# We could override this method if we wanted more control over how batches
# are constructed, but it's not necessary for this tutorial since we can
# reuse the batching provided by LanguagePairDataset.
#
# def get_batch_iterator(
# self, dataset, max_tokens=None, max_sentences=None, max_positions=None,
# ignore_invalid_inputs=False, required_batch_size_multiple=1,
# seed=1, num_shards=1, shard_id=0, num_workers=0, epoch=1,
# data_buffer_size=0, disable_iterator_cache=False,
# ):
# (...)
4. Training the Model
---------------------
Now we're ready to train the model. We can use the existing :ref:`fairseq-train`
command-line tool for this, making sure to specify our new Task (``--task
simple_classification``) and Model architecture (``--arch
pytorch_tutorial_rnn``):
.. note::
You can also configure the dimensionality of the hidden state by passing the
``--hidden-dim`` argument to :ref:`fairseq-train`.
.. code-block:: console
> fairseq-train names-bin \
--task simple_classification \
--arch pytorch_tutorial_rnn \
--optimizer adam --lr 0.001 --lr-shrink 0.5 \
--max-tokens 1000
(...)
| epoch 027 | loss 1.200 | ppl 2.30 | wps 15728 | ups 119.4 | wpb 116 | bsz 116 | num_updates 3726 | lr 1.5625e-05 | gnorm 1.290 | clip 0% | oom 0 | wall 32 | train_wall 21
| epoch 027 | valid on 'valid' subset | valid_loss 1.41304 | valid_ppl 2.66 | num_updates 3726 | best 1.41208
| done training in 31.6 seconds
The model files should appear in the :file:`checkpoints/` directory.
5. Writing an evaluation script
-------------------------------
Finally we can write a short script to evaluate our model on new inputs. Create
a new file named :file:`eval_classifier.py` with the following contents::
from fairseq import checkpoint_utils, data, options, tasks
# Parse command-line arguments for generation
parser = options.get_generation_parser(default_task='simple_classification')
args = options.parse_args_and_arch(parser)
# Setup task
task = tasks.setup_task(args)
# Load model
print('| loading model from {}'.format(args.path))
models, _model_args = checkpoint_utils.load_model_ensemble([args.path], task=task)
model = models[0]
while True:
sentence = input('\nInput: ')
# Tokenize into characters
chars = ' '.join(list(sentence.strip()))
tokens = task.source_dictionary.encode_line(
chars, add_if_not_exist=False,
)
# Build mini-batch to feed to the model
batch = data.language_pair_dataset.collate(
samples=[{'id': -1, 'source': tokens}], # bsz = 1
pad_idx=task.source_dictionary.pad(),
eos_idx=task.source_dictionary.eos(),
left_pad_source=False,
input_feeding=False,
)
# Feed batch to the model and get predictions
preds = model(**batch['net_input'])
# Print top 3 predictions and their log-probabilities
top_scores, top_labels = preds[0].topk(k=3)
for score, label_idx in zip(top_scores, top_labels):
label_name = task.target_dictionary.string([label_idx])
print('({:.2f})\t{}'.format(score, label_name))
Now we can evaluate our model interactively. Note that we have included the
original data path (:file:`names-bin/`) so that the dictionaries can be loaded:
.. code-block:: console
> python eval_classifier.py names-bin --path checkpoints/checkpoint_best.pt
| [input] dictionary: 64 types
| [label] dictionary: 24 types
| loading model from checkpoints/checkpoint_best.pt
Input: Satoshi
(-0.61) Japanese
(-1.20) Arabic
(-2.86) Italian
Input: Sinbad
(-0.30) Arabic
(-1.76) English
(-4.08) Russian
|
COCO-LM/fairseq/docs/tutorial_classifying_names.rst/0
|
{
"file_path": "COCO-LM/fairseq/docs/tutorial_classifying_names.rst",
"repo_id": "COCO-LM",
"token_count": 6519
}
| 164 |
#!/bin/bash
if [ $# -ne 5 ]; then
echo "usage: $0 [dataset=wmt14/full] [langpair=en-de] [databin] [bpecode] [model]"
    exit 1
fi
DATASET=$1
LANGPAIR=$2
DATABIN=$3
BPECODE=$4
MODEL=$5
SRCLANG=$(echo $LANGPAIR | cut -d '-' -f 1)
TGTLANG=$(echo $LANGPAIR | cut -d '-' -f 2)
BPEROOT=examples/backtranslation/subword-nmt/subword_nmt
if [ ! -e $BPEROOT ]; then
BPEROOT=subword-nmt/subword_nmt
if [ ! -e $BPEROOT ]; then
echo 'Cloning Subword NMT repository (for BPE pre-processing)...'
git clone https://github.com/rsennrich/subword-nmt.git
fi
fi
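# Pipeline: fetch the source side of the test set with sacrebleu, tokenize it
# with sacremoses, apply BPE, translate with fairseq-interactive, keep only the
# hypothesis lines (H-*), detokenize, and score against the reference.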
sacrebleu -t $DATASET -l $LANGPAIR --echo src \
| sacremoses tokenize -a -l $SRCLANG -q \
| python $BPEROOT/apply_bpe.py -c $BPECODE \
| fairseq-interactive $DATABIN --path $MODEL \
-s $SRCLANG -t $TGTLANG \
--beam 5 --remove-bpe --buffer-size 1024 --max-tokens 8000 \
| grep ^H- | cut -f 3- \
| sacremoses detokenize -l $TGTLANG -q \
| sacrebleu -t $DATASET -l $LANGPAIR
|
COCO-LM/fairseq/examples/backtranslation/sacrebleu.sh/0
|
{
"file_path": "COCO-LM/fairseq/examples/backtranslation/sacrebleu.sh",
"repo_id": "COCO-LM",
"token_count": 450
}
| 165 |
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
SPM_ENCODE=flores/scripts/spm_encode.py
DATA=data_tmp
SPM_MODEL=criss_checkpoints/sentence.bpe.model
DICT=criss_checkpoints/dict.txt
download_data() {
CORPORA=$1
URL=$2
if [ -f $CORPORA ]; then
echo "$CORPORA already exists, skipping download"
else
echo "Downloading $URL"
wget $URL -O $CORPORA --no-check-certificate || rm -f $CORPORA
if [ -f $CORPORA ]; then
echo "$URL successfully downloaded."
else
echo "$URL not successfully downloaded."
rm -f $CORPORA
fi
fi
}
if [[ -d flores ]]; then
echo "flores already cloned"
else
git clone https://github.com/facebookresearch/flores
fi
mkdir -p $DATA
download_data $DATA/wikipedia_en_ne_si_test_sets.tgz "https://github.com/facebookresearch/flores/raw/master/data/wikipedia_en_ne_si_test_sets.tgz"
pushd $DATA
pwd
tar -vxf wikipedia_en_ne_si_test_sets.tgz
popd
for lang in ne_NP si_LK; do
datadir=$DATA/${lang}-en_XX-flores
rm -rf $datadir
mkdir -p $datadir
TEST_PREFIX=$DATA/wikipedia_en_ne_si_test_sets/wikipedia.test
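    # ${lang:0:2} strips the locale suffix (e.g. ne_NP -> ne) to match the
    # two-letter codes used in the test-set file names.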
python $SPM_ENCODE \
--model ${SPM_MODEL} \
--output_format=piece \
--inputs ${TEST_PREFIX}.${lang:0:2}-en.${lang:0:2} ${TEST_PREFIX}.${lang:0:2}-en.en \
--outputs $datadir/test.bpe.${lang}-en_XX.${lang} $datadir/test.bpe.${lang}-en_XX.en_XX
# binarize data
fairseq-preprocess \
--source-lang ${lang} --target-lang en_XX \
--testpref $datadir/test.bpe.${lang}-en_XX \
--destdir $datadir \
--srcdict ${DICT} \
--joined-dictionary \
--workers 4
done
|
COCO-LM/fairseq/examples/criss/download_and_preprocess_flores_test.sh/0
|
{
"file_path": "COCO-LM/fairseq/examples/criss/download_and_preprocess_flores_test.sh",
"repo_id": "COCO-LM",
"token_count": 719
}
| 166 |
# Jointly Learning to Align and Translate with Transformer Models (Garg et al., 2019)
This page includes instructions for training models described in [Jointly Learning to Align and Translate with Transformer Models (Garg et al., 2019)](https://arxiv.org/abs/1909.02074).
## Training a joint alignment-translation model on WMT'18 En-De
##### 1. Extract and preprocess the WMT'18 En-De data
```bash
./prepare-wmt18en2de_no_norm_no_escape_no_agressive.sh
```
##### 2. Generate alignments with a statistical alignment toolkit, e.g., Giza++ or FastAlign.
In this example, we use FastAlign.
```bash
git clone https://github.com/clab/fast_align.git
pushd fast_align
mkdir build
cd build
cmake ..
make
popd
ALIGN=fast_align/build/fast_align
paste bpe.32k/train.en bpe.32k/train.de | awk -F '\t' '{print $1 " ||| " $2}' > bpe.32k/train.en-de
$ALIGN -i bpe.32k/train.en-de -d -o -v > bpe.32k/train.align
```
##### 3. Preprocess the dataset with the above generated alignments.
```bash
fairseq-preprocess \
--source-lang en --target-lang de \
--trainpref bpe.32k/train \
--validpref bpe.32k/valid \
--testpref bpe.32k/test \
--align-suffix align \
--destdir binarized/ \
--joined-dictionary \
--workers 32
```
##### 4. Train a model
```bash
fairseq-train \
binarized \
--arch transformer_wmt_en_de_big_align --share-all-embeddings \
--optimizer adam --adam-betas '(0.9, 0.98)' --clip-norm 0.0 --activation-fn relu \
--lr 0.0002 --lr-scheduler inverse_sqrt --warmup-updates 4000 --warmup-init-lr 1e-07 \
--dropout 0.3 --attention-dropout 0.1 --weight-decay 0.0 \
--max-tokens 3500 --label-smoothing 0.1 \
--save-dir ./checkpoints --log-interval 1000 --max-update 60000 \
--keep-interval-updates -1 --save-interval-updates 0 \
--load-alignments --criterion label_smoothed_cross_entropy_with_alignment \
--fp16
```
Note that the `--fp16` flag requires that you have CUDA 9.1 or greater and a Volta GPU or newer.
If you want to train the above model with big batches (assuming your machine has 8 GPUs):
- add `--update-freq 8` to simulate training on 8x8=64 GPUs
- increase the learning rate; 0.0007 works well for big batches
##### 5. Evaluate and generate the alignments (BPE level)
```bash
fairseq-generate \
binarized --gen-subset test --print-alignment \
--source-lang en --target-lang de \
--path checkpoints/checkpoint_best.pt --beam 5 --nbest 1
```
##### 6. Other resources.
The code for:
1. preparing alignment test sets
2. converting BPE level alignments to token level alignments
3. symmetrizing bidirectional alignments
4. evaluating alignments using AER metric
can be found [here](https://github.com/lilt/alignment-scripts)
## Citation
```bibtex
@inproceedings{garg2019jointly,
title = {Jointly Learning to Align and Translate with Transformer Models},
author = {Garg, Sarthak and Peitz, Stephan and Nallasamy, Udhyakumar and Paulik, Matthias},
booktitle = {Conference on Empirical Methods in Natural Language Processing (EMNLP)},
address = {Hong Kong},
month = {November},
url = {https://arxiv.org/abs/1909.02074},
year = {2019},
}
```
|
COCO-LM/fairseq/examples/joint_alignment_translation/README.md/0
|
{
"file_path": "COCO-LM/fairseq/examples/joint_alignment_translation/README.md",
"repo_id": "COCO-LM",
"token_count": 1128
}
| 167 |
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
CWD=`pwd`
INSTALL_PATH=$CWD/tokenizers/thirdparty
MOSES=$INSTALL_PATH/mosesdecoder
if [ ! -d $MOSES ]; then
echo 'Cloning Moses github repository (for tokenization scripts)...'
git clone https://github.com/moses-smt/mosesdecoder.git $MOSES
cd $MOSES
# To deal with differences in handling ' vs "
git checkout 03578921cc1a03402
cd -
fi
WMT16_SCRIPTS=$INSTALL_PATH/wmt16-scripts
if [ ! -d $WMT16_SCRIPTS ]; then
echo 'Cloning Romanian tokenization scripts'
git clone https://github.com/rsennrich/wmt16-scripts.git $WMT16_SCRIPTS
fi
KYTEA=$INSTALL_PATH/kytea
if [ ! -f $KYTEA/bin/kytea ]; then
git clone https://github.com/neubig/kytea.git $KYTEA
cd $KYTEA
autoreconf -i
./configure --prefix=`pwd`
make
make install
cd ..
fi
export MECAB=$INSTALL_PATH/mecab-0.996-ko-0.9.2
if [ ! -f $MECAB/bin/mecab ]; then
cd $INSTALL_PATH
curl -LO https://bitbucket.org/eunjeon/mecab-ko/downloads/mecab-0.996-ko-0.9.2.tar.gz
tar zxfv mecab-0.996-ko-0.9.2.tar.gz
cd mecab-0.996-ko-0.9.2/
./configure --prefix=`pwd`
make
make install
cd ..
curl -LO https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/mecab-ko-dic-2.1.1-20180720.tar.gz
tar zxfv mecab-ko-dic-2.1.1-20180720.tar.gz
cd mecab-ko-dic-2.1.1-20180720/
./autogen.sh
./configure --prefix=`pwd` --with-dicdir=$MECAB/lib/mecab/dic/mecab-ko-dic --with-mecab-config=$MECAB/bin/mecab-config
make
sh -c 'echo "dicdir=$MECAB/lib/mecab/dic/mecab-ko-dic" > $MECAB/etc/mecabrc'
make install
cd $CWD
fi
INDIC_RESOURCES_PATH=$INSTALL_PATH/indic_nlp_resources
if [ ! -d $INDIC_RESOURCES_PATH ]; then
echo 'Cloning indic_nlp_resources'
git clone https://github.com/anoopkunchukuttan/indic_nlp_resources.git $INDIC_RESOURCES_PATH
fi
if [ ! -f $INSTALL_PATH/seg_my.py ]; then
cd $INSTALL_PATH
wget http://lotus.kuee.kyoto-u.ac.jp/WAT/my-en-data/wat2020.my-en.zip
unzip wat2020.my-en.zip
# switch to python3
cat wat2020.my-en/myseg.py |sed 's/^sys.std/###sys.std/g' | sed 's/### sys/sys/g' | sed 's/unichr/chr/g' > seg_my.py
cd $CWD
fi
pip install pythainlp sacrebleu indic-nlp-library
|
COCO-LM/fairseq/examples/m2m_100/install_dependecies.sh/0
|
{
"file_path": "COCO-LM/fairseq/examples/m2m_100/install_dependecies.sh",
"repo_id": "COCO-LM",
"token_count": 1106
}
| 168 |
from typing import NamedTuple, List
from urllib.parse import urlparse
import os, sys
import subprocess
from subprocess import check_call, check_output
import glob
import wget
import re
import multiprocessing as mp
from functools import partial
import pathlib
from collections import OrderedDict
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', None)
if WORKDIR_ROOT is None or not WORKDIR_ROOT.strip():
    print('Please specify your working directory root in the OS environment variable WORKDIR_ROOT. Exiting...')
sys.exit(-1)
# scripts and data locations
CWD = os.getcwd()
UTILS = f"{CWD}/utils"
MOSES = f"{UTILS}/mosesdecoder"
SGM_TOOL = f'{MOSES}/scripts/ems/support/input-from-sgm.perl'
TMX2CORPUS = f"{UTILS}/tmx2corpus"
TMX_TOOL = f'python {TMX2CORPUS}/tmx2corpus.py'
to_data_path = f'{WORKDIR_ROOT}/wmt'
download_to = f'{to_data_path}/downloads'
manually_downloads = f'{to_data_path}/downloads'
extract_to = f'{to_data_path}/extracted'
#DESTDIR=${WORKDIR_ROOT}/ML50/raw/
raw_data = f'{WORKDIR_ROOT}/ML50/raw'
####
class DLDataset(NamedTuple):
name: str
train_urls: List[str]
valid_urls: List[str]
test_urls: List[str]
train_files_patterns: List[str] = []
valid_files_patterns: List[str] = []
test_files_patterns: List[str] = []
def bar_custom(current, total, width=80):
print("Downloading: %d%% [%d / %d] Ks" % (current / total * 100, current / 1000, total / 1000), end='\r')
def get_downloaded_file(dl_folder, url):
if isinstance(url, tuple):
url, f = url
else:
url_f = urlparse(url)
# f = os.path.split(url_f.path)[-1]
f = '_'.join(url_f.path.split('/')[1:])
return url, f"{dl_folder}/{f}"
def download_parts_and_combine(dl_folder, urls, filename):
parts = []
for url_record in urls:
url, part_file = get_downloaded_file(dl_folder, url_record)
if os.path.exists(part_file):
print(f'{part_file} has already been downloaded so skip')
else:
part_file = wget.download(url, part_file, bar=bar_custom)
parts.append(part_file)
def get_combine_cmd(parts):
#default as tar.gz.??
return f'cat {" ".join(parts)} > {filename}'
combine_cmd = get_combine_cmd(parts)
call(combine_cmd, debug=True)
return filename
def download_a_url(dl_folder, url):
url, filename = get_downloaded_file(dl_folder, url)
if os.path.exists(filename):
print(f'{filename} has already been downloaded so skip')
return filename
print(f'downloading {url} to {filename}')
if isinstance(url, list) or isinstance(url, tuple):
download_parts_and_combine(dl_folder, url, filename)
else:
wget.download(url, filename, bar=bar_custom)
    print(f'downloaded: {filename}')
return filename
def download_files(dl_folder, urls, completed_urls={}):
for url_record in urls:
url, _ = get_downloaded_file(dl_folder, url_record)
filename = download_a_url(dl_folder, url_record)
completed_urls[str(url)] = filename
return completed_urls
def check_need_manual_download(dl_folder, to_manually_download_urls):
    to_be_manually_downloaded = []
    manually_completed_urls = {}
    for url_record, instruction in to_manually_download_urls:
        url, filename = get_downloaded_file(dl_folder, url_record)
        if not os.path.exists(filename):
            print(f'{url} needs to be downloaded manually: {instruction}; copy it to {filename}')
            to_be_manually_downloaded.append((url, filename))
        else:
            manually_completed_urls[url] = filename
    # if len(to_be_manually_downloaded) > 0:
    #     raise ValueError('Missing files that need to be downloaded manually; stop the process now.')
    return to_be_manually_downloaded
def download_dataset(to_folder, dl_dataset, completed_urls={}):
download_files(to_folder, dl_dataset.train_urls, completed_urls)
download_files(to_folder, dl_dataset.valid_urls, completed_urls)
download_files(to_folder, dl_dataset.test_urls, completed_urls)
print('completed downloading')
return completed_urls
def call(cmd, debug=False):
if debug:
print(cmd)
check_call(cmd, shell=True)
def get_extract_name(file_path):
path = os.path.split(file_path)
return path[-1] + '_extract' #.split('.')[0]
def extract_file(downloaded_file, extract_folder, get_extract_name=get_extract_name, debug=False):
extract_name = get_extract_name(downloaded_file)
extract_to = f'{extract_folder}/{extract_name}'
os.makedirs(extract_to, exist_ok=True)
if os.path.exists(f'{extract_to}/DONE'):
print(f'{downloaded_file} has already been extracted to {extract_to} so skip')
return extract_to
def get_extract_cmd(filename):
if filename.endswith('.tgz') or filename.endswith('tar.gz'):
return f'tar xzfv {filename} -C {extract_to}'
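        # '.gz.tar' archives (e.g. training-giga-fren) are tars of gzipped
        # members: untar first, then gunzip the extracted files.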
elif filename.endswith('.gz.tar'):
return f'tar xfv {filename} -C {extract_to}; (cd {extract_to}; gzip -d *.gz; [ $? -eq 0 ] || gzip -d */*.gz)'
elif filename.endswith('.tar'):
return f'tar xfv {filename} -C {extract_to}'
elif filename.endswith('.gz'):
return f'cp {filename} {extract_to}; (cd {extract_to}; gzip -d *.gz)'
elif filename.endswith('.zip'):
return f'unzip {filename} -d {extract_to}'
extract_cmd = get_extract_cmd(downloaded_file)
print(f'extracting {downloaded_file}')
if isinstance(extract_cmd, list):
for c in extract_cmd:
call(c, debug=debug)
else:
call(extract_cmd, debug=debug)
call(f'echo DONE > {extract_to}/DONE')
return extract_to
def extract_all_files(
completed_urls, extract_folder,
get_extract_name=get_extract_name,
completed_extraction={},
debug=False):
extracted_folders = OrderedDict()
for url, downloaded_file in set(completed_urls.items()):
if downloaded_file in completed_extraction:
print(f'{downloaded_file} is already extracted; so skip')
continue
folder = extract_file(downloaded_file, extract_folder, get_extract_name, debug)
extracted_folders[url] = folder
return extracted_folders
def my_glob(folder):
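    # Glob files up to three directory levels below the extraction folder.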
for p in [f'{folder}/*', f'{folder}/*/*', f'{folder}/*/*/*']:
for f in glob.glob(p):
yield f
def sgm2raw(sgm, debug):
to_file = sgm[0:len(sgm) - len('.sgm')]
if os.path.exists(to_file):
debug and print(f'{sgm} already converted to {to_file}; so skip')
return to_file
cmd = f'{SGM_TOOL} < {sgm} > {to_file}'
call(cmd, debug)
return to_file
def tmx2raw(tmx, debug):
to_file = tmx[0:len(tmx) - len('.tmx')]
to_folder = os.path.join(*os.path.split(tmx)[:-1])
if os.path.exists(f'{to_folder}/bitext.en'):
debug and print(f'{tmx} already extracted to {to_file}; so skip')
return to_file
cmd = f'(cd {to_folder}; {TMX_TOOL} {tmx})'
call(cmd, debug)
return to_file
CZENG16_REGEX = re.compile(r'.*?data.plaintext-format/0[0-9]train$')
WMT19_WIKITITLES_REGEX = re.compile(r'.*?wikititles-v1.(\w\w)-en.tsv.gz')
TSV_REGEX = re.compile(r'.*?(\w\w)-(\w\w).tsv$')
def cut_wikititles(wiki_file, debug):
# different languages have different file names:
if wiki_file.endswith('wiki/fi-en/titles.fi-en'):
to_file1 = f'{wiki_file}.fi'
to_file2 = f'{wiki_file}.en'
BACKSLASH = '\\'
cmd1 = f"cat {wiki_file} | sed 's/|||/{BACKSLASH}t/g' |cut -f1 |awk '{{$1=$1}};1' > {to_file1}"
cmd2 = f"cat {wiki_file} | sed 's/|||/{BACKSLASH}t/g' |cut -f2 |awk '{{$1=$1}};1' > {to_file2}"
# elif WMT19_WIKITITLES_REGEX.match(wiki_file):
# src = WMT19_WIKITITLES_REGEX.match(wiki_file).groups()[0]
# to_file1 = f'{wiki_file}.{src}'
# to_file2 = f'{wiki_file}.en'
# cmd1 = f"cat {wiki_file} | cut -f1 |awk '{{$1=$1}};1' > {to_file1}"
# cmd2 = f"cat {wiki_file} | cut -f2 |awk '{{$1=$1}};1' > {to_file2}"
else:
return None
if os.path.exists(to_file1) and os.path.exists(to_file2):
debug and print(f'{wiki_file} already processed to {to_file1} and {to_file2}; so skip')
return wiki_file
call(cmd1, debug=debug)
call(cmd2, debug=debug)
return wiki_file
def cut_tsv(file, debug):
m = TSV_REGEX.match(file)
if m is None:
raise ValueError(f'{file} is not matching tsv pattern')
src = m.groups()[0]
tgt = m.groups()[1]
to_file1 = f'{file}.{src}'
to_file2 = f'{file}.{tgt}'
cmd1 = f"cat {file} | cut -f1 |awk '{{$1=$1}};1' > {to_file1}"
cmd2 = f"cat {file} | cut -f2 |awk '{{$1=$1}};1' > {to_file2}"
if os.path.exists(to_file1) and os.path.exists(to_file2):
debug and print(f'{file} already processed to {to_file1} and {to_file2}; so skip')
return file
call(cmd1, debug=debug)
call(cmd2, debug=debug)
return file
def convert_file_if_needed(file, debug):
if file.endswith('.sgm'):
return sgm2raw(file, debug)
elif file.endswith('.tmx'):
return tmx2raw(file, debug)
    elif file.endswith('wiki/fi-en/titles.fi-en'):
        return cut_wikititles(file, debug)
    # elif WMT19_WIKITITLES_REGEX.match(file):
    #     return cut_wikititles(file, debug)
elif file.endswith('.tsv'):
return cut_tsv(file, debug)
elif CZENG16_REGEX.match(file):
return convert2czeng17(file, debug)
else:
return file
def convert_files_if_needed(extracted_folders, my_glob=my_glob, debug=False):
    return {
        url: list(sorted(set(convert_file_if_needed(f, debug) for f in sorted(set(my_glob(folder))))))
        for url, folder in extracted_folders.items()
    }
def match_patt(file_path, file_pattern, src, tgt, lang):
return file_pattern.format(src=src, tgt=tgt, lang=lang) in file_path
def match_patts(file_path, file_patterns, src, tgt, lang):
    for file_pattern in file_patterns:
        if isinstance(file_pattern, tuple):
            # unpack (pattern, directions) entries and skip non-matching directions
            file_pattern, directions = file_pattern
            if f'{src}-{tgt}' not in directions:
                continue
        params = {k: v for k, v in [('src', src), ('tgt', tgt), ('lang', lang)] if k in file_pattern}
        matching = file_pattern.format(**params)
        if matching in file_path:
            return True
    return False
def extracted_glob(extracted_folder, file_patterns, src, tgt, lang):
def get_matching_pattern(file_pattern):
params = {
k: v
for k, v in [('src', src), ('tgt', tgt), ('lang', lang)]
if '{' + k + '}' in file_pattern
}
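        # '{src:...}' / '{tgt:...}' are conditional segments: the inner text is
        # kept only when `lang` is the source / target side, so a pattern like
        # 'newstest2018-{src}{tgt}-{src:src}{tgt:ref}.{lang}' resolves to
        # '...-src.<lang>' on the source side and '...-ref.<lang>' on the target.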
file_pattern = re.sub(r'{src:(.*?)}', r'\1' if lang == src else '', file_pattern)
file_pattern = re.sub(r'{tgt:(.*?)}', r'\1' if lang == tgt else '', file_pattern)
file_pattern = file_pattern.format(**params)
return file_pattern
for file_pattern in file_patterns:
if isinstance(file_pattern, tuple):
file_pattern, lang_pairs = file_pattern
if f'{src}-{tgt}' not in lang_pairs:
continue
# print('working on pattern: ', file_pattern, lang_pairs )
matching_pattern = get_matching_pattern(file_pattern)
if matching_pattern is None:
continue
glob_patterns = f'{extracted_folder}/{matching_pattern}'
# print('glob_patterns: ', glob_patterns)
for f in glob.glob(glob_patterns):
yield f
# for debug usage
def all_extracted_files(split, src, tgt, extracted_folders, split_urls):
def get_url(url):
if isinstance(url, tuple):
url, downloaded_file = url
return url
return [
f
for url in split_urls
for f in my_glob(extracted_folders[str(get_url(url))])
]
def concat_files(split, src, tgt, extracted_folders, split_urls, path_patterns, to_folder, debug=False):
# if debug:
# print('extracted files to be filtered by patterns: ',
# '\n\t'.join(sorted(all_extracted_files(split, src, tgt, extracted_folders, split_urls))))
for lang in [src, tgt]:
to_file = f'{to_folder}/{split}.{src}-{tgt}.{lang}'
s_src, s_tgt, s_lang = src.split('_')[0], tgt.split('_')[0], lang.split('_')[0]
files = []
for url in split_urls:
if isinstance(url, tuple):
url, downloaded_file = url
if str(url) not in extracted_folders:
print(f'warning: {url} not in extracted files')
for extracted_file in set(
extracted_glob(
extracted_folders[str(url)], path_patterns,
s_src, s_tgt, s_lang)):
files.append(extracted_file)
if len(files) == 0:
print('warning: ', f'No files found for split {to_file}')
continue
files = sorted(set(files))
print(f'concating {len(files)} files into {to_file}')
cmd = ['cat'] + [f'"{f}"' for f in files] + [f'>{to_file}']
cmd = " ".join(cmd)
call(cmd, debug=debug)
UTILS = os.path.join(pathlib.Path(__file__).parent, 'utils')
LID_MODEL = f'{download_to}/lid.176.bin'
LID_MULTI = f'{UTILS}/fasttext_multi_filter.py'
def lid_filter(split, src, tgt, from_folder, to_folder, debug=False):
if not os.path.exists(LID_MODEL):
call(f'wget -nc https://dl.fbaipublicfiles.com/fasttext/supervised-models/lid.176.bin -O {LID_MODEL}')
from_prefix = f'{from_folder}/{split}.{src}-{tgt}'
to_prefix = f'{to_folder}/{split}.{src}-{tgt}'
if os.path.exists(f'{from_prefix}.{src}') and os.path.exists(f'{from_prefix}.{tgt}'):
s_src, s_tgt = src.split('_')[0], tgt.split('_')[0]
cmd = (
f'python {LID_MULTI} --model {LID_MODEL} --inputs {from_prefix}.{src} {from_prefix}.{tgt} '
f'--langs {s_src} {s_tgt} --outputs {to_prefix}.{src} {to_prefix}.{tgt}'
)
print(f'filtering {from_prefix}')
call(cmd, debug=debug)
def concat_into_splits(dl_dataset, src, tgt, extracted_folders, to_folder, debug):
to_folder_tmp = f"{to_folder}_tmp"
os.makedirs(to_folder_tmp, exist_ok=True)
concat_files('train', src, tgt,
extracted_folders,
split_urls=dl_dataset.train_urls,
path_patterns=dl_dataset.train_files_patterns,
to_folder=to_folder_tmp, debug=debug)
lid_filter('train', src, tgt, to_folder_tmp, to_folder, debug)
concat_files('valid', src, tgt,
extracted_folders,
split_urls=dl_dataset.valid_urls,
path_patterns=dl_dataset.valid_files_patterns,
to_folder=to_folder, debug=debug)
concat_files('test', src, tgt,
extracted_folders,
split_urls=dl_dataset.test_urls,
path_patterns=dl_dataset.test_files_patterns,
to_folder=to_folder, debug=debug)
def download_multi(dl_folder, extract_folder, urls, num_processes=8, debug=False):
    pool = mp.Pool(processes=num_processes)
    download_f = partial(download_a_url, dl_folder)
    # materialize the iterator so that all downloads actually run before join()
    downloaded_files = list(pool.imap_unordered(download_f, urls))
    pool.close()
    pool.join()
    return downloaded_files
BLEU_REGEX = re.compile("^BLEU\\S* = (\\S+) ")
def run_eval_bleu(cmd):
output = check_output(cmd, shell=True, stderr=subprocess.STDOUT).decode("utf-8").strip()
print(output)
bleu = -1.0
for line in output.strip().split('\n'):
m = BLEU_REGEX.search(line)
if m is not None:
bleu = m.groups()[0]
bleu = float(bleu)
break
return bleu
def check_wmt_test_bleu(raw_folder, wmt_lang_pairs):
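    # Sanity check: score each extracted test file against sacrebleu's own copy
    # of that test set; a BLEU of exactly 100 means the files match verbatim.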
not_matchings = []
for wmt, src_tgts in wmt_lang_pairs:
for src_tgt in src_tgts:
print(f'checking test bleus for: {src_tgt} at {wmt}')
src, tgt = src_tgt.split('-')
ssrc, stgt = src[:2], tgt[:2]
if os.path.exists(f'{raw_folder}/test.{tgt}-{src}.{src}'):
# reversed direction may have different test set
test_src = f'{raw_folder}/test.{tgt}-{src}.{src}'
else:
test_src = f'{raw_folder}/test.{src}-{tgt}.{src}'
cmd1 = f'cat {test_src} | sacrebleu -t "{wmt}" -l {stgt}-{ssrc}; [ $? -eq 0 ] || echo ""'
test_tgt = f'{raw_folder}/test.{src}-{tgt}.{tgt}'
cmd2 = f'cat {test_tgt} | sacrebleu -t "{wmt}" -l {ssrc}-{stgt}; [ $? -eq 0 ] || echo ""'
bleu1 = run_eval_bleu(cmd1)
if bleu1 != 100.0:
not_matchings.append(f'{wmt}:{src_tgt} source side not matching: {test_src}')
bleu2 = run_eval_bleu(cmd2)
if bleu2 != 100.0:
not_matchings.append(f'{wmt}:{src_tgt} target side not matching: {test_tgt}')
return not_matchings
def download_and_extract(
to_folder, lang_pairs, dl_dataset,
to_manually_download_urls,
completed_urls={}, completed_extraction={},
debug=False):
dl_folder = f'{to_folder}/downloads'
extract_folder = f'{to_folder}/extracted'
raw_folder = f'{to_folder}/raw'
lid_filtered = f'{to_folder}/lid_filtered'
os.makedirs(extract_folder, exist_ok=True)
os.makedirs(raw_folder, exist_ok=True)
os.makedirs(lid_filtered, exist_ok=True)
    to_be_manually_downloaded = check_need_manual_download(dl_folder, to_manually_download_urls)
completed_urls = download_dataset(
dl_folder, dl_dataset, completed_urls)
if debug:
print('completed urls: ', completed_urls)
extracted_folders = extract_all_files(
completed_urls,
extract_folder=extract_folder,
completed_extraction=completed_extraction,
debug=debug)
if debug:
print('download files have been extracted to folders: ', extracted_folders)
converted_files = convert_files_if_needed(extracted_folders, debug=False)
for src_tgt in lang_pairs:
print(f'working on {dl_dataset.name}: {src_tgt}')
src, tgt = src_tgt.split('-')
concat_into_splits(dl_dataset,
src=src, tgt=tgt,
extracted_folders=extracted_folders,
to_folder=raw_folder, debug=debug)
print('completed data into: ', raw_folder)
def download_czeng16(download_to, username=None):
wgets = [
f'wget --user={username} --password=czeng -P {download_to} http://ufallab.ms.mff.cuni.cz/~bojar/czeng16-data/data-plaintext-format.{i}.tar'
for i in range(10)]
cmds = []
for i, cmd in enumerate(wgets):
filename = f'{download_to}/data-plaintext-format.{i}.tar'
if os.path.exists(filename):
print(f'{filename} has already been downloaded; so skip')
continue
cmds.append(cmd)
if cmds and username is None:
raise ValueError('No czeng username is given; please register at http://ufal.mff.cuni.cz/czeng/czeng16 to obtain username to download')
for cmd in cmds:
call(cmd)
print('done with downloading czeng1.6')
def download_czeng17_script(download_to, extract_folder, debug=False):
url = 'http://ufal.mff.cuni.cz/czeng/download.php?f=convert_czeng16_to_17.pl.zip'
filename = f'{download_to}/convert_czeng16_to_17.pl.zip'
extract_to = f'{extract_folder}/{get_extract_name(filename)}'
script_path = f'{extract_to}/convert_czeng16_to_17.pl'
if not os.path.exists(script_path):
wget.download(url, filename, bar=bar_custom)
extract_to = extract_file(f'{download_to}/convert_czeng16_to_17.pl.zip', extract_folder, get_extract_name=get_extract_name, debug=debug)
return script_path
czeng17_script_path = ""
def convert2czeng17(file, debug):
en_file = f'{file}.en'
cs_file = f'{file}.cs'
if not os.path.exists(en_file) or not os.path.exists(cs_file):
cs_cmd = f'cat {file} | perl {czeng17_script_path} | cut -f3 > {cs_file}'
en_cmd = f'cat {file} | perl {czeng17_script_path} | cut -f4 > {en_file}'
call(cs_cmd, debug)
call(en_cmd, debug)
else:
print(f'already extracted: {en_file} and {cs_file}')
return file
def extract_czeng17(extract_folder, debug=False):
url = 'http://ufal.mff.cuni.cz/czeng/download.php?f=convert_czeng16_to_17.pl.zip'
filename = f'{download_to}/convert_czeng16_to_17.pl.zip'
extract_to = f'{extract_folder}/{get_extract_name(filename)}'
script_path = f'{extract_to}/convert_czeng16_to_17.pl'
if not os.path.exists(script_path):
wget.download(url, filename, bar=bar_custom)
extract_to = extract_file(f'{download_to}/convert_czeng16_to_17.pl.zip', extract_folder, get_extract_name=get_extract_name, debug=debug)
return script_path
#########
# definitions of wmt data sources
# for es-en
# Punctuation in the official test sets will be encoded with ASCII characters (not complex Unicode characters) as much as possible. You may want to normalize your system's output before submission. You are able to use a rawer version of the test sets that does not have this normalization.
# script to normalize punctuation: http://www.statmt.org/wmt11/normalize-punctuation.perl
wmt13_es_en = DLDataset(
name='wmt13_es-en',
train_urls=[
'http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz',
'http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz',
'http://www.statmt.org/wmt13/training-parallel-un.tgz',
'http://www.statmt.org/wmt13/training-parallel-nc-v8.tgz',
],
valid_urls=[
('http://www.statmt.org/wmt13/dev.tgz', 'wmt13_dev.tgz')
],
test_urls=[
('http://www.statmt.org/wmt13/test.tgz', 'wmt13_test.tgz')
],
train_files_patterns=[
('*/europarl-v7.{src}-{tgt}.{lang}', ['es-en']),
('*commoncrawl.{src}-{tgt}.{lang}', ['es-en']),
('*/news-commentary-v8.{src}-{tgt}.{lang}', ['es-en']),
('un/*undoc.2000.{src}-{tgt}.{lang}', ['es-en']),
] ,
valid_files_patterns=[
('dev/newstest2012.{lang}', ['es-en'])
],
test_files_patterns=[
('test/newstest*.{lang}', ['es-en'])
],
)
wmt14_de_fr_en = DLDataset(
name='wmt14_de_fr_en',
train_urls=[
'http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz',
'http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz',
'http://www.statmt.org/wmt13/training-parallel-un.tgz',
'http://www.statmt.org/wmt14/training-parallel-nc-v9.tgz',
        ('http://www.statmt.org/wmt10/training-giga-fren.tar', 'training-giga-fren.gz.tar'),  # it is actually a gz.tar
],
valid_urls=[
('http://www.statmt.org/wmt14/dev.tgz', 'wmt14_dev.tgz'),
],
test_urls=[
('http://www.statmt.org/wmt14/test-full.tgz', 'wmt14_test_full.tgz'), # cleaned test sets
],
train_files_patterns=[
('*/europarl-v7.{src}-{tgt}.{lang}', ['fr-en', 'de-en']),
('*commoncrawl.{src}-{tgt}.{lang}', ['fr-en', 'de-en']),
('*/*news-commentary-v9.{src}-{tgt}.{lang}', ['fr-en', 'de-en']),
('un/undoc.2000.{src}-{tgt}.{lang}', ['fr-en']),
('*giga-{src}{tgt}*{lang}', ['fr-en'])
],
valid_files_patterns=[
('dev/newstest2013.{lang}', ['fr-en', 'de-en'])
],
test_files_patterns=[
('test-full/newstest*{src}{tgt}-{src:src}{tgt:ref}.{lang}', ['en-de', 'de-en', 'fr-en', 'en-fr']),
],
)
# pip install git+https://github.com/amake/tmx2corpus.git
wmt16_ro_en = DLDataset(
name='wmt16_ro-en',
train_urls=[
('http://data.statmt.org/wmt16/translation-task/training-parallel-ep-v8.tgz', 'wmt16_training-parallel-ep-v8.tgz'),
('http://opus.nlpl.eu/download.php?f=SETIMES/v2/tmx/en-ro.tmx.gz', 'en-ro.tmx.gz'),
],
valid_urls=[
('http://data.statmt.org/wmt16/translation-task/dev-romanian-updated.tgz', 'wmt16_dev.tgz')
],
test_urls=[
('http://data.statmt.org/wmt16/translation-task/test.tgz', 'wmt16_test.tgz')
],
train_files_patterns=[
('*/*europarl-v8.{src}-{tgt}.{lang}', ['ro-en']),
        ('bitext.{lang}', ['ro-en'])  # SETIMES, extracted from tmx
    ],
valid_files_patterns=[
        ('dev/newsdev2016*{src}{tgt}*.{lang}', ['ro-en', 'en-ro'])
],
test_files_patterns=[
('test/newstest*{src}{tgt}*.{lang}', ['ro-en', 'en-ro'])
],
)
cwmt_wmt_instruction = 'cwmt download instruction at: http://nlp.nju.edu.cn/cwmt-wmt'
wmt17_fi_lv_tr_zh_en_manual_downloads = [
# fake urls to have unique keys for the data
( ('http://nlp.nju.edu.cn/cwmt-wmt/CASIA2015.zip', 'CASIA2015.zip'), cwmt_wmt_instruction),
( ('http://nlp.nju.edu.cn/cwmt-wmt/CASICT2011.zip', 'CASICT2011.zip'), cwmt_wmt_instruction),
( ('http://nlp.nju.edu.cn/cwmt-wmt/CASICT2015.zip', 'CASICT2015.zip'), cwmt_wmt_instruction),
( ('http://nlp.nju.edu.cn/cwmt-wmt/Datum2015.zip', 'Datum2015.zip'), cwmt_wmt_instruction),
( ('http://nlp.nju.edu.cn/cwmt-wmt/Datum2017.zip', 'Datum2017.zip'), cwmt_wmt_instruction),
( ('http://nlp.nju.edu.cn/cwmt-wmt/NEU2017.zip', 'NEU2017.zip'), cwmt_wmt_instruction),
]
wmt17_fi_lv_tr_zh_en = DLDataset(
name='wmt17_fi_lv_tr_zh_en',
train_urls=[
('http://data.statmt.org/wmt17/translation-task/training-parallel-ep-v8.tgz', 'wmt17_training-parallel-ep-v8.tgz'),
'http://data.statmt.org/wmt17/translation-task/training-parallel-nc-v12.tgz',
'http://www.statmt.org/wmt15/wiki-titles.tgz',
('http://opus.nlpl.eu/download.php?f=SETIMES/v2/tmx/en-tr.tmx.gz', 'en-tr.tmx.gz'),
('http://data.statmt.org/wmt17/translation-task/rapid2016.tgz', 'wmt17_rapid2016.tgz'),
'http://data.statmt.org/wmt17/translation-task/leta.v1.tgz',
'http://data.statmt.org/wmt17/translation-task/dcep.lv-en.v1.tgz',
'http://data.statmt.org/wmt17/translation-task/books.lv-en.v1.tgz',
(('https://stuncorpusprod.blob.core.windows.net/corpusfiles/UNv1.0.en-zh.tar.gz.00',
'https://stuncorpusprod.blob.core.windows.net/corpusfiles/UNv1.0.en-zh.tar.gz.01',), 'UNv1.0.en-zh.tar.gz'),
#manually download files:
('http://nlp.nju.edu.cn/cwmt-wmt/CASIA2015.zip', 'CASIA2015.zip'),
('http://nlp.nju.edu.cn/cwmt-wmt/CASICT2011.zip', 'CASICT2011.zip'),
('http://nlp.nju.edu.cn/cwmt-wmt/CASICT2015.zip', 'CASICT2015.zip'),
('http://nlp.nju.edu.cn/cwmt-wmt/Datum2015.zip', 'Datum2015.zip'),
('http://nlp.nju.edu.cn/cwmt-wmt/Datum2017.zip', 'Datum2017.zip'),
('http://nlp.nju.edu.cn/cwmt-wmt/NEU2017.zip', 'NEU2017.zip'),
],
valid_urls=[
('http://data.statmt.org/wmt17/translation-task/dev.tgz', 'wmt17_dev.tgz'),
],
test_urls=[
#NEW: Improved translations for zh test sets
('http://data.statmt.org/wmt17/translation-task/test-update-1.tgz', 'wmt17_test_zh_en.tgz'),
('http://data.statmt.org/wmt17/translation-task/test.tgz', 'wmt17_test_others.tgz')
],
train_files_patterns=[
('casict*/cas*{src:ch}{tgt:en}.txt', ['zh-en', 'zh-en'] ),
('casia*/cas*{src:ch}{tgt:en}.txt', ['zh-en', 'zh-en'] ),
('dataum*/Book*{src:cn}{tgt:en}.txt', ['zh-en', 'zh-en']),
('neu*/NEU*{src:cn}{tgt:en}.txt', ['zh-en', 'zh-en'] ),
('*/*UNv1.0.en-zh.{src:zh}{tgt:en}', ['zh-en']),
('training/*news-commentary-v12.{src}-{tgt}.{lang}', ['zh-en', ]),
('*/*europarl-v8.{src}-{tgt}.{lang}', ['fi-en', 'lv-en']),
('wiki/fi-en/titles.{src}-{tgt}.{lang}', ['fi-en', ]),
('rapid2016.{tgt}-{src}.{lang}', ['fi-en', 'lv-en']),
('*/leta.{lang}', ['lv-en']),
('*/dcep.{lang}', ['lv-en']),
('*/farewell.{lang}', ['lv-en']),
('bitext.{lang}', ['tr-en']),
] ,
valid_files_patterns=[
('dev/newsdev2017*{src}{tgt}-{src:src}{tgt:ref}.{lang}',
[
'fi-en', 'lv-en', 'tr-en', 'zh-en',
'en-fi', 'en-lv', 'en-tr', 'en-zh'
]),
('dev/newstest2016*{src}{tgt}-{src:src}{tgt:ref}.{lang}',
[
'fi-en', 'tr-en',
'en-fi', 'en-tr',
]),
],
test_files_patterns=[
('test/newstest2017-{src}{tgt}-{src:src}{tgt:ref}.{lang}',
[
'fi-en', 'lv-en', 'tr-en',
'en-fi', 'en-lv', 'en-tr',
]),
('newstest2017-{src}{tgt}-{src:src}{tgt:ref}.{lang}',
[
'zh-en',
'en-zh'
]),
],
)
czeng_instruction = 'download instruction at: http://ufal.mff.cuni.cz/czeng/czeng16'
#alternative: use the prepared data but detokenize it?
wmt18_cs_et_en_manual_downloads = [
#for cs, need to register and download; Register and download CzEng 1.6.
#Better results can be obtained by using a subset of sentences, released under a new version name CzEng 1.7.
# ((f'http://ufallab.ms.mff.cuni.cz/~bojar/czeng16-data/data-plaintext-format.{i}.tar',
# f'data-plaintext-format.{i}.tar'), czeng_instruction)
# for i in range(10)
]
wmt18_cs_et_en = DLDataset(
name='wmt18_cs_et_en',
train_urls=[
'http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz',
'http://data.statmt.org/wmt18/translation-task/training-parallel-ep-v8.tgz',
'https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-cs.zipporah0-dedup-clean.tgz',
'https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-et.zipporah0-dedup-clean.tgz',
'http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz',
'http://data.statmt.org/wmt18/translation-task/training-parallel-nc-v13.tgz',
('http://data.statmt.org/wmt18/translation-task/rapid2016.tgz', 'wmt18_rapid2016.tgz'),
# (tuple(
# (f'http://ufallab.ms.mff.cuni.cz/~bojar/czeng16-data/data-plaintext-format.{i}.tar',
# f'data-plaintext-format.{i}.tar')
# for i in range(10)
# ),
# 'czeng16_data_plaintext.gz.tar'),
],
valid_urls=[
('http://data.statmt.org/wmt18/translation-task/dev.tgz', 'wmt18_dev.tgz'),
],
test_urls=[
('http://data.statmt.org/wmt18/translation-task/test.tgz', 'wmt18_test.tgz'),
],
train_files_patterns=[
# ('*/*europarl-v7.{src}-{tgt}.{lang}', ['cs-en']),
('*/*europarl-v8.{src}-{tgt}.{lang}', ['et-en']),
# ('*paracrawl-release1.{tgt}-{src}.zipporah0-dedup-clean.{lang}', ['cs-en', 'et-en']),
('*paracrawl-release1.{tgt}-{src}.zipporah0-dedup-clean.{lang}', ['et-en']),
# ('*commoncrawl.{src}-{tgt}.{lang}', ['cs-en']),
# ('*/news-commentary-v13.{src}-{tgt}.{lang}', ['cs-en']),
# ('data.plaintext-format/*train.{lang}', ['cs-en']),
('rapid2016.{tgt}-{src}.{lang}', ['et-en']),
    ],
valid_files_patterns=[
('dev/newsdev2018*{src}{tgt}-{src:src}{tgt:ref}.{lang}', ['et-en']),
# ('dev/newstest2017*{src}{tgt}-{src:src}{tgt:ref}.{lang}', ['cs-en'])
],
test_files_patterns=[
('test/newstest2018-{src}{tgt}-{src:src}{tgt:ref}.{lang}',
# ['cs-en', 'et-en']),
['et-en']),
]
)
ru_en_yandex_instruction = 'Yandex Corpus download instruction at: https://translate.yandex.ru/corpus?lang=en'
wmt19_ru_gu_kk_lt_manual_downloads = [
(('https://translate.yandex.ru/corpus?lang=en', 'wmt19_1mcorpus.zip'), ru_en_yandex_instruction)
]
wmt19_ru_gu_kk_lt = DLDataset(
name='wmt19_ru_gu_kk_lt',
train_urls=[
'http://www.statmt.org/europarl/v9/training/europarl-v9.lt-en.tsv.gz',
'https://s3.amazonaws.com/web-language-models/paracrawl/release3/en-lt.bicleaner07.tmx.gz',
'https://s3.amazonaws.com/web-language-models/paracrawl/release1/paracrawl-release1.en-ru.zipporah0-dedup-clean.tgz',
'http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz',
'http://data.statmt.org/news-commentary/v14/training/news-commentary-v14-wmt19.en-kk.tsv.gz',
'http://data.statmt.org/news-commentary/v14/training/news-commentary-v14.en-ru.tsv.gz',
'http://data.statmt.org/wikititles/v1/wikititles-v1.kk-en.tsv.gz',
'http://data.statmt.org/wikititles/v1/wikititles-v1.ru-en.tsv.gz',
'http://data.statmt.org/wikititles/v1/wikititles-v1.lt-en.tsv.gz',
'http://data.statmt.org/wikititles/v1/wikititles-v1.gu-en.tsv.gz',
(('https://stuncorpusprod.blob.core.windows.net/corpusfiles/UNv1.0.en-ru.tar.gz.00',
'https://stuncorpusprod.blob.core.windows.net/corpusfiles/UNv1.0.en-ru.tar.gz.01',
'https://stuncorpusprod.blob.core.windows.net/corpusfiles/UNv1.0.en-ru.tar.gz.02',),
'wmt19_UNv1.0.en-ru.tar.gz'),
'https://tilde-model.s3-eu-west-1.amazonaws.com/rapid2016.en-lt.tmx.zip',
('https://translate.yandex.ru/corpus?lang=en', 'wmt19_1mcorpus.zip'),
],
valid_urls=[
('http://data.statmt.org/wmt19/translation-task/dev.tgz', 'wmt19_dev.tgz'),
],
test_urls=[
('http://data.statmt.org/wmt19/translation-task/test.tgz', 'wmt19_test.tgz'),
],
train_files_patterns=[
('*europarl-v9.{src}-{tgt}.tsv.{lang}', ['lt-en']),
#paracrawl
('*paracrawl-release1.{tgt}-{src}.zipporah0-dedup-clean.{lang}', ['ru-en']),
('bitext.{lang}', ['lt-en',]),
('*commoncrawl.{src}-{tgt}.{lang}', ['ru-en',]),
('*news-commentary-v14-wmt19.{tgt}-{src}.tsv.{lang}', ['kk-en', ]),
('*news-commentary-v14.{tgt}-{src}.tsv.{lang}', ['ru-en']),
#yandex
('corpus.{tgt}_{src}.1m.{lang}', ['ru-en']),
('wikititles_v1_wikititles-v1.{src}-{tgt}.tsv.{lang}', ['ru-en', 'kk-en', 'lt-en', 'gu-en']),
('*/UNv1.0.{tgt}-{src}.{lang}', ['ru-en']),
#rapid
('bitext.{lang}', ['lt-en'])
],
valid_files_patterns=[
('dev/newsdev2019*{src}{tgt}-{src:src}{tgt:ref}.{lang}', ['gu-en', 'kk-en', 'lt-en']),
('dev/newstest2018*{src}{tgt}-{src:src}{tgt:ref}.{lang}', ['ru-en']),
],
test_files_patterns=[
('sgm/newstest2019-{src}{tgt}-{src:src}{tgt:ref}.{lang}',
['ru-en', 'gu-en', 'kk-en', 'lt-en', 'en-ru', 'en-gu', 'en-kk', 'en-lt']),
]
)
#########
if __name__ == "__main__":
    # speed up the downloads with multiprocessing
dl_folder = f'{to_data_path}/downloads'
extract_folder = f'{to_data_path}/extracted'
urls = [
url
for dataset in [wmt13_es_en, wmt14_de_fr_en, wmt16_ro_en, wmt18_cs_et_en, wmt19_ru_gu_kk_lt]
for urls in [dataset.train_urls, dataset.valid_urls, dataset.test_urls]
for url in urls
]
urls = set(urls)
download_multi(dl_folder, extract_folder, urls, num_processes=8, debug=True)
    # check manual downloads
to_manually_download_urls = (
wmt17_fi_lv_tr_zh_en_manual_downloads + wmt18_cs_et_en_manual_downloads + wmt19_ru_gu_kk_lt_manual_downloads
)
    to_be_manually_downloaded = check_need_manual_downalod(dl_folder, to_manually_download_urls)
    if len(to_be_manually_downloaded) > 0:
        print('Missing files that need to be downloaded manually; stopping the process now.')
        exit(-1)
completed_urls = {}
completed_extraction = {}
def work_on_wmt(directions, wmt_data):
download_and_extract(
to_data_path,
directions,
wmt_data,
to_manually_download_urls=to_manually_download_urls,
completed_urls=completed_urls, completed_extraction=completed_extraction, debug=True)
work_on_wmt(
['es_XX-en_XX'],
wmt13_es_en,)
work_on_wmt(
[
'fr_XX-en_XX', 'en_XX-fr_XX',
# 'en_XX-de_DE', 'de_DE-en_XX',
],
wmt14_de_fr_en,)
work_on_wmt(
        ['ro_RO-en_XX', 'en_XX-ro_RO'],
wmt16_ro_en,)
work_on_wmt(
[
# 'zh_CN-en_XX',
'lv_LV-en_XX', 'fi_FI-en_XX', 'tr_TR-en_XX',
#in case the reversed directions have different train/valid/test data
# 'en_XX-zh_CN',
'en_XX-lv_LV', 'en_XX-fi_FI', 'en_XX-tr_TR',
],
wmt17_fi_lv_tr_zh_en, )
# czeng17_script_path = download_czeng17_script(download_to, extract_to, debug=False)
# cz_username = None
work_on_wmt(
[
# 'cs_CZ-en_XX',
'et_EE-en_XX'],
wmt18_cs_et_en,)
work_on_wmt(
[
# 'ru_RU-en_XX', 'en_XX-ru_RU',
'gu_IN-en_XX', 'kk_KZ-en_XX', 'lt_LT-en_XX',
#in case the reversed directions have different train/valid/test data
'en_XX-gu_IN', 'en_XX-kk_KZ', 'en_XX-lt_LT'
],
wmt19_ru_gu_kk_lt,)
not_matching = check_wmt_test_bleu(
f'{to_data_path}/raw',
[
('wmt13', ['es_XX-en_XX']),
('wmt14/full', ['fr_XX-en_XX',]),
('wmt16', ['ro_RO-en_XX',]),
# ('wmt17/improved', ['zh_CN-en_XX']),
('wmt17', [ 'lv_LV-en_XX', 'fi_FI-en_XX', 'tr_TR-en_XX']),
('wmt18', ['cs_CZ-en_XX', 'et_EE-en_XX']),
('wmt19', ['gu_IN-en_XX', 'kk_KZ-en_XX', 'lt_LT-en_XX']),
#'ru_RU-en_XX',
]
)
if len(not_matching) > 0:
print('the following datasets do not have matching test datasets:\n\t', '\n\t'.join(not_matching))
===== COCO-LM/fairseq/examples/multilingual/data_scripts/download_wmt19_and_before.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from contextlib import redirect_stdout
from fairseq import options
from fairseq_cli import generate
from examples.noisychannel import rerank_options, rerank_utils
def score_bw(args):
if args.backwards1:
scorer1_src = args.target_lang
scorer1_tgt = args.source_lang
else:
scorer1_src = args.source_lang
scorer1_tgt = args.target_lang
if args.score_model2 is not None:
if args.backwards2:
scorer2_src = args.target_lang
scorer2_tgt = args.source_lang
else:
scorer2_src = args.source_lang
scorer2_tgt = args.target_lang
rerank1_is_gen = (
args.gen_model == args.score_model1 and args.source_prefix_frac is None
)
rerank2_is_gen = (
args.gen_model == args.score_model2 and args.source_prefix_frac is None
)
(
pre_gen,
left_to_right_preprocessed_dir,
right_to_left_preprocessed_dir,
backwards_preprocessed_dir,
lm_preprocessed_dir,
) = rerank_utils.get_directories(
args.data_dir_name,
args.num_rescore,
args.gen_subset,
args.gen_model_name,
args.shard_id,
args.num_shards,
args.sampling,
args.prefix_len,
args.target_prefix_frac,
args.source_prefix_frac,
)
score1_file = rerank_utils.rescore_file_name(
pre_gen,
args.prefix_len,
args.model1_name,
target_prefix_frac=args.target_prefix_frac,
source_prefix_frac=args.source_prefix_frac,
backwards=args.backwards1,
)
if args.score_model2 is not None:
score2_file = rerank_utils.rescore_file_name(
pre_gen,
args.prefix_len,
args.model2_name,
target_prefix_frac=args.target_prefix_frac,
source_prefix_frac=args.source_prefix_frac,
backwards=args.backwards2,
)
if args.right_to_left1:
rerank_data1 = right_to_left_preprocessed_dir
elif args.backwards1:
rerank_data1 = backwards_preprocessed_dir
else:
rerank_data1 = left_to_right_preprocessed_dir
gen_param = ["--batch-size", str(128), "--score-reference", "--gen-subset", "train"]
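    # Note: "--score-reference" makes fairseq-generate compute the model's
    # log-probabilities of the provided reference translations instead of
    # decoding new hypotheses, so each "generation" run below is really a
    # rescoring pass over the candidates prepared in pre_gen.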
if not rerank1_is_gen and not os.path.isfile(score1_file):
print("STEP 4: score the translations for model 1")
model_param1 = [
"--path",
args.score_model1,
"--source-lang",
scorer1_src,
"--target-lang",
scorer1_tgt,
]
gen_model1_param = [rerank_data1] + gen_param + model_param1
gen_parser = options.get_generation_parser()
input_args = options.parse_args_and_arch(gen_parser, gen_model1_param)
with open(score1_file, "w") as f:
with redirect_stdout(f):
generate.main(input_args)
if (
args.score_model2 is not None
and not os.path.isfile(score2_file)
and not rerank2_is_gen
):
print("STEP 4: score the translations for model 2")
if args.right_to_left2:
rerank_data2 = right_to_left_preprocessed_dir
elif args.backwards2:
rerank_data2 = backwards_preprocessed_dir
else:
rerank_data2 = left_to_right_preprocessed_dir
model_param2 = [
"--path",
args.score_model2,
"--source-lang",
scorer2_src,
"--target-lang",
scorer2_tgt,
]
gen_model2_param = [rerank_data2] + gen_param + model_param2
gen_parser = options.get_generation_parser()
input_args = options.parse_args_and_arch(gen_parser, gen_model2_param)
with open(score2_file, "w") as f:
with redirect_stdout(f):
generate.main(input_args)
def cli_main():
parser = rerank_options.get_reranking_parser()
args = options.parse_args_and_arch(parser)
score_bw(args)
if __name__ == "__main__":
cli_main()
===== COCO-LM/fairseq/examples/noisychannel/rerank_score_bw.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# This file defines example configuration arguments for quantizing
# a transformer model with product quantization
# Number of Centroids for Product Quantization, by default 256 (byte-aligned)
n_centroids:
Linear:
key: in_features
value: {"*": 256}
Embedding:
key: embedding_dim
value: {"*": 256}
# Block Sizes for Product Quantization
# We suggest: 8 for FFN, 4 for ATTN, 4 for embedding projections, 8 for embeddings
block_sizes:
Linear:
key: fuzzy_name
value: {fc: 8, attn: 4, emb: 4}
Embedding:
key: fuzzy_name
value: {emb: 8}
# Layers to Quantize Sequentially
# We suggest: first FFN, then EMB, then ATTN
layers_to_quantize:
- decoder\\.layers\\.\d+\\.fc[12]
- decoder\\.embed_tokens\\.embeddings\\.[012]\\.[01]
- decoder\\.layers\\.\d+\\.self_attn\\.(k_proj|v_proj|q_proj|out_proj)
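# Illustration (comment only): with the settings above, a module named
# "decoder.layers.3.fc1" matches the fuzzy key "fc", so its weights are
# quantized in blocks of 8 with 256 centroids -- each group of 8 consecutive
# weights is replaced by a 1-byte index into a learned codebook, roughly a
# 32x compression of the weight matrix before codebook overhead.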
===== COCO-LM/fairseq/examples/quant_noise/transformer_quantization_config.yaml =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.criterions import LegacyFairseqCriterion, register_criterion
from fairseq.data import encoders
@register_criterion("wsc")
class WSCCriterion(LegacyFairseqCriterion):
def __init__(self, args, task):
super().__init__(args, task)
if self.args.save_predictions is not None:
self.prediction_h = open(self.args.save_predictions, "w")
else:
self.prediction_h = None
self.bpe = encoders.build_bpe(args.bpe)
self.tokenizer = encoders.build_tokenizer(args.tokenizer)
def __del__(self):
if self.prediction_h is not None:
self.prediction_h.close()
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
parser.add_argument("--wsc-margin-alpha", type=float, metavar="A", default=1.0)
parser.add_argument("--wsc-margin-beta", type=float, metavar="B", default=0.0)
parser.add_argument(
"--wsc-cross-entropy",
action="store_true",
help="use cross entropy formulation instead of margin loss",
)
parser.add_argument(
"--save-predictions", metavar="FILE", help="file to save predictions to"
)
def get_masked_input(self, tokens, mask):
masked_tokens = tokens.clone()
masked_tokens[mask] = self.task.mask
return masked_tokens
def get_lprobs(self, model, tokens, mask):
logits, _ = model(src_tokens=self.get_masked_input(tokens, mask))
lprobs = F.log_softmax(logits, dim=-1, dtype=torch.float)
scores = lprobs.gather(2, tokens.unsqueeze(-1)).squeeze(-1)
mask = mask.type_as(scores)
scores = (scores * mask).sum(dim=-1) / mask.sum(dim=-1)
return scores
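    # The margin formulation in get_loss() below computes
    #   loss = sum_c [ -log P(query) + alpha * max(0, log P(cand_c) - log P(query) + beta) ]
    # so the query (correct) span must outscore every candidate span by at
    # least the margin beta; --wsc-cross-entropy replaces this with a softmax
    # over the query and candidate scores.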
def get_loss(self, query_lprobs, cand_lprobs):
if self.args.wsc_cross_entropy:
return F.cross_entropy(
torch.cat([query_lprobs, cand_lprobs]).unsqueeze(0),
query_lprobs.new([0]).long(),
)
else:
return (
-query_lprobs
+ self.args.wsc_margin_alpha
* (cand_lprobs - query_lprobs + self.args.wsc_margin_beta).clamp(min=0)
).sum()
def forward(self, model, sample, reduce=True):
# compute loss and accuracy
loss, nloss = 0.0, 0
ncorrect, nqueries = 0, 0
for i, label in enumerate(sample["labels"]):
query_lprobs = self.get_lprobs(
model,
sample["query_tokens"][i].unsqueeze(0),
sample["query_masks"][i].unsqueeze(0),
)
cand_lprobs = self.get_lprobs(
model,
sample["candidate_tokens"][i],
sample["candidate_masks"][i],
)
pred = (query_lprobs >= cand_lprobs).all().item()
if label is not None:
label = 1 if label else 0
ncorrect += 1 if pred == label else 0
nqueries += 1
if label:
# only compute a loss for positive instances
nloss += 1
loss += self.get_loss(query_lprobs, cand_lprobs)
id = sample["id"][i].item()
if self.prediction_h is not None:
print("{}\t{}\t{}".format(id, pred, label), file=self.prediction_h)
if nloss == 0:
loss = torch.tensor(0.0, requires_grad=True)
sample_size = nqueries if nqueries > 0 else 1
logging_output = {
"loss": utils.item(loss.data) if reduce else loss.data,
"ntokens": sample["ntokens"],
"nsentences": sample["nsentences"],
"sample_size": sample_size,
"ncorrect": ncorrect,
"nqueries": nqueries,
}
return loss, sample_size, logging_output
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get("loss", 0) for log in logging_outputs)
ntokens = sum(log.get("ntokens", 0) for log in logging_outputs)
nsentences = sum(log.get("nsentences", 0) for log in logging_outputs)
sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
agg_output = {
"loss": loss_sum / sample_size / math.log(2),
"ntokens": ntokens,
"nsentences": nsentences,
"sample_size": sample_size,
}
ncorrect = sum(log.get("ncorrect", 0) for log in logging_outputs)
nqueries = sum(log.get("nqueries", 0) for log in logging_outputs)
if nqueries > 0:
agg_output["accuracy"] = ncorrect / float(nqueries)
return agg_output
@register_criterion("winogrande")
class WinograndeCriterion(WSCCriterion):
def forward(self, model, sample, reduce=True):
# compute loss and accuracy
query_lprobs = self.get_lprobs(
model,
sample["query_tokens"],
sample["query_masks"],
)
cand_lprobs = self.get_lprobs(
model,
sample["candidate_tokens"],
sample["candidate_masks"],
)
pred = query_lprobs >= cand_lprobs
loss = self.get_loss(query_lprobs, cand_lprobs)
sample_size = sample["query_tokens"].size(0)
ncorrect = pred.sum().item()
logging_output = {
"loss": utils.item(loss.data) if reduce else loss.data,
"ntokens": sample["ntokens"],
"nsentences": sample["nsentences"],
"sample_size": sample_size,
"ncorrect": ncorrect,
"nqueries": sample_size,
}
return loss, sample_size, logging_output
===== COCO-LM/fairseq/examples/roberta/wsc/wsc_criterion.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
class SubwordSplitter(object):
def process_line(self, string):
raise NotImplementedError
def split(self, string):
raise NotImplementedError
class NoneWordSplitter(object):
def __init__(self, model):
pass
def split(self, string):
return [string]
def process_line(self, string):
return [string]
def finished_word(self, string):
return True
def merge(self, list_of_string):
return "".join(list_of_string)
def last_full_word_step(self, tokens, step):
return len(tokens)
def end_idx_last_full_word(self, tokens):
return len(tokens)
class BPEWordSplitter(object):
    # TODO: look back here
def __init__(self, model_path):
super().__init__()
from subword_nmt.apply_bpe import BPE
with open(model_path) as f:
self.model = BPE(f)
def split(self, string):
return self.model.process_line(string).split()
def end_idx_last_full_word(self, tokens):
# Begin of word indices
bow_indices = [0] + [i + 1 for i, t in enumerate(tokens[1:]) if t[-2:] != "@@"]
if len(bow_indices) < 2:
return 0
else:
return bow_indices[-1]
def merge(self, list_of_string):
return " ".join([item.replace("@@", "") for item in list_of_string])
class SentencePieceModelWordSplitter(object):
def __init__(self, model_path):
super().__init__()
import sentencepiece as spm
self.model = spm.SentencePieceProcessor()
self.model.Load(model_path)
def split(self, string):
return self.model.EncodeAsPieces(string)
def end_idx_last_full_word(self, tokens):
# Begin of word indices
bow_indices = [i for i, t in enumerate(tokens) if t[0] == "\u2581"]
if len(bow_indices) < 2:
return 0
else:
return bow_indices[-1]
def merge(self, list_of_string):
return self.model.DecodePieces(list_of_string)
SPLITTER_DICT = {
None: NoneWordSplitter,
"BPE": BPEWordSplitter,
"SentencePieceModel": SentencePieceModelWordSplitter,
}
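# Minimal usage sketch (assumption: "bpe.codes" is a valid subword-nmt codes
# file; the pieces shown are illustrative):
#   splitter = SPLITTER_DICT["BPE"]("bpe.codes")
#   pieces = splitter.split("machine translation")  # "@@" marks word-internal pieces
#   idx = splitter.end_idx_last_full_word(pieces)   # pieces[:idx] form complete words
# Streaming agents can commit pieces[:idx] and hold back a possibly partial
# trailing word until more input arrives.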
===== COCO-LM/fairseq/examples/simultaneous_translation/eval/agents/word_splitter.py =====
# @package hydra.sweeper
_target_: hydra_plugins.hydra_ax_sweeper.ax_sweeper.AxSweeper
max_batch_size: null
ax_config:
max_trials: 100
early_stop:
minimize: true
max_epochs_without_improvement: 10
epsilon: 1.0e-05
experiment:
name: ${dataset.gen_subset}
objective_name: wer
minimize: true
parameter_constraints: null
outcome_constraints: null
status_quo: null
client:
verbose_logging: false
random_seed: null
params:
decoding.decoder.lmweight:
type: range
bounds: [0.0, 5.0]
decoding.decoder.wordscore:
type: range
bounds: [-5.0, 5.0]
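# Usage sketch (the entry-point path is an assumption): launch a multirun so
# the Ax sweeper samples decoding.decoder.lmweight and decoding.decoder.wordscore
# from the ranges above, minimizing WER and stopping early after 10 rounds
# without at least epsilon = 1e-5 improvement:
#   python examples/speech_recognition/hydra/infer.py --multirun hydra/sweeper=ax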
===== COCO-LM/fairseq/examples/speech_recognition/hydra/conf/hydra/sweeper/ax.yaml =====
[[Back]](..)
# S2T Example: Speech Translation (ST) on Multilingual TEDx
[Multilingual TEDx](https://arxiv.org/abs/2102.01757) is a multilingual corpus for speech recognition and
speech translation. The data is derived from TEDx talks in 8 source languages
with translations to a subset of 5 target languages.
## Data Preparation
[Download](http://openslr.org/100/) and unpack Multilingual TEDx data to a path
`${MTEDX_ROOT}/${LANG_PAIR}`, then preprocess it with
```bash
# additional Python packages for S2T data processing/model training
pip install pandas torchaudio soundfile sentencepiece
# Generate TSV manifests, features, vocabulary
# and configuration for each language
python examples/speech_to_text/prep_mtedx_data.py \
--data-root ${MTEDX_ROOT} --task asr \
--vocab-type unigram --vocab-size 1000
python examples/speech_to_text/prep_mtedx_data.py \
--data-root ${MTEDX_ROOT} --task st \
--vocab-type unigram --vocab-size 1000
# Add vocabulary and configuration for joint data
# (based on the manifests and features generated above)
python examples/speech_to_text/prep_mtedx_data.py \
--data-root ${MTEDX_ROOT} --task asr --joint \
--vocab-type unigram --vocab-size 8000
python examples/speech_to_text/prep_mtedx_data.py \
--data-root ${MTEDX_ROOT} --task st --joint \
--vocab-type unigram --vocab-size 8000
```
The generated files (manifest, features, vocabulary and data configuration) will be added to
`${MTEDX_ROOT}/${LANG_PAIR}` (per-language data) and `MTEDX_ROOT` (joint data).
## ASR
#### Training
Spanish (Es) as an example:
```bash
fairseq-train ${MTEDX_ROOT}/es-es \
--config-yaml config_asr.yaml --train-subset train_asr --valid-subset valid_asr \
--save-dir ${ASR_SAVE_DIR} --num-workers 4 --max-tokens 40000 --max-epoch 200 \
--task speech_to_text --criterion label_smoothed_cross_entropy --report-accuracy \
--arch s2t_transformer_xs --optimizer adam --lr 2e-3 --lr-scheduler inverse_sqrt \
--warmup-updates 10000 --clip-norm 10.0 --seed 1 --dropout 0.3 --label-smoothing 0.1 \
--load-pretrained-encoder-from ${PRETRAINED_ENCODER} \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 --update-freq 8 --patience 10
```
For the joint model (using ASR data from all 8 languages):
```bash
fairseq-train ${MTEDX_ROOT} \
--config-yaml config_asr.yaml \
--train-subset train_es-es_asr,train_fr-fr_asr,train_pt-pt_asr,train_it-it_asr,train_ru-ru_asr,train_el-el_asr,train_ar-ar_asr,train_de-de_asr \
--valid-subset valid_es-es_asr,valid_fr-fr_asr,valid_pt-pt_asr,valid_it-it_asr,valid_ru-ru_asr,valid_el-el_asr,valid_ar-ar_asr,valid_de-de_asr \
--save-dir ${MULTILINGUAL_ASR_SAVE_DIR} --num-workers 4 --max-tokens 40000 --max-epoch 200 \
--task speech_to_text --criterion label_smoothed_cross_entropy --report-accuracy \
--arch s2t_transformer_s --optimizer adam --lr 2e-3 --lr-scheduler inverse_sqrt \
--warmup-updates 10000 --clip-norm 10.0 --seed 1 --dropout 0.3 --label-smoothing 0.1 \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 --update-freq 8 --patience 10 \
--ignore-prefix-size 1
```
where `MULTILINGUAL_ASR_SAVE_DIR` is the checkpoint root path. We set `--update-freq 8` to simulate 8 GPUs
with 1 GPU. You may want to update it accordingly when using more than 1 GPU.
For multilingual models, we prepend target language ID token as target BOS, which should be excluded from
the training loss via `--ignore-prefix-size 1`.
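As a rough illustration of what `--ignore-prefix-size 1` does (a sketch, not fairseq's actual implementation), the first target position, which holds the language ID token, is sliced off before the cross-entropy is computed:
```python
import torch
import torch.nn.functional as F

# toy shapes: batch=2, target length=5 (position 0 is the language ID token),
# vocabulary of 100 tokens
logits = torch.randn(2, 5, 100)
targets = torch.randint(0, 100, (2, 5))

ignore_prefix_size = 1  # mirrors --ignore-prefix-size 1
logits = logits[:, ignore_prefix_size:, :]
targets = targets[:, ignore_prefix_size:]

loss = F.cross_entropy(logits.reshape(-1, 100), targets.reshape(-1))
```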
#### Inference & Evaluation
```bash
CHECKPOINT_FILENAME=avg_last_10_checkpoint.pt
python scripts/average_checkpoints.py \
--inputs ${ASR_SAVE_DIR} --num-epoch-checkpoints 10 \
--output "${ASR_SAVE_DIR}/${CHECKPOINT_FILENAME}"
fairseq-generate ${MTEDX_ROOT}/es-es \
--config-yaml config_asr.yaml --gen-subset test --task speech_to_text \
--path ${ASR_SAVE_DIR}/${CHECKPOINT_FILENAME} --max-tokens 50000 --beam 5 \
--skip-invalid-size-inputs-valid-test \
--scoring wer --wer-tokenizer 13a --wer-lowercase --wer-remove-punct --remove-bpe
# For models trained on joint data
CHECKPOINT_FILENAME=avg_last_10_checkpoint.pt
python scripts/average_checkpoints.py \
--inputs ${MULTILINGUAL_ASR_SAVE_DIR} --num-epoch-checkpoints 10 \
--output "${MULTILINGUAL_ASR_SAVE_DIR}/${CHECKPOINT_FILENAME}"
for LANG in es fr pt it ru el ar de; do
fairseq-generate ${MTEDX_ROOT} \
--config-yaml config_asr.yaml --gen-subset test_${LANG}-${LANG}_asr --task speech_to_text \
--prefix-size 1 --path ${MULTILINGUAL_ASR_SAVE_DIR}/${CHECKPOINT_FILENAME} \
--max-tokens 40000 --beam 5 \
--skip-invalid-size-inputs-valid-test \
--scoring wer --wer-tokenizer 13a --wer-lowercase --wer-remove-punct --remove-bpe
done
```
#### Results
| Data | --arch | Params | Es | Fr | Pt | It | Ru | El | Ar | De |
|--------------|--------------------|--------|------|------|------|------|------|-------|-------|-------|
| Monolingual | s2t_transformer_xs | 10M | 46.4 | 45.6 | 54.8 | 48.0 | 74.7 | 109.5 | 104.4 | 111.1 |
## ST
#### Training
Es-En as an example:
```bash
fairseq-train ${MTEDX_ROOT}/es-en \
--config-yaml config_st.yaml --train-subset train_st --valid-subset valid_st \
--save-dir ${ST_SAVE_DIR} --num-workers 4 --max-tokens 40000 --max-epoch 200 \
--task speech_to_text --criterion label_smoothed_cross_entropy --report-accuracy \
--arch s2t_transformer_xs --optimizer adam --lr 2e-3 --lr-scheduler inverse_sqrt \
--warmup-updates 10000 --clip-norm 10.0 --seed 1 --dropout 0.3 --label-smoothing 0.1 \
--load-pretrained-encoder-from ${PRETRAINED_ENCODER} \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 --update-freq 8 --patience 10
```
For the multilingual model (all 12 directions):
```bash
fairseq-train ${MTEDX_ROOT} \
--config-yaml config_st.yaml \
--train-subset train_el-en_st,train_es-en_st,train_es-fr_st,train_es-it_st,train_es-pt_st,train_fr-en_st,train_fr-es_st,train_fr-pt_st,train_it-en_st,train_it-es_st,train_pt-en_st,train_pt-es_st,train_ru-en_st \
--valid-subset valid_el-en_st,valid_es-en_st,valid_es-fr_st,valid_es-it_st,valid_es-pt_st,valid_fr-en_st,valid_fr-es_st,valid_fr-pt_st,valid_it-en_st,valid_it-es_st,valid_pt-en_st,valid_pt-es_st,valid_ru-en_st \
--save-dir ${MULTILINGUAL_ST_SAVE_DIR} --num-workers 4 --max-tokens 40000 --max-epoch 200 \
--task speech_to_text --criterion label_smoothed_cross_entropy --report-accuracy \
--arch s2t_transformer_s --optimizer adam --lr 2e-3 --lr-scheduler inverse_sqrt \
--warmup-updates 10000 --clip-norm 10.0 --seed 1 --dropout 0.3 --label-smoothing 0.1 \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 --update-freq 8 --patience 10 \
--ignore-prefix-size 1 \
--load-pretrained-encoder-from ${PRETRAINED_ENCODER}
```
where `ST_SAVE_DIR` (`MULTILINGUAL_ST_SAVE_DIR`) is the checkpoint root path. The ST encoder is pre-trained by ASR
for faster training and better performance: `--load-pretrained-encoder-from <(JOINT_)ASR checkpoint path>`. We set
`--update-freq 8` to simulate 8 GPUs with 1 GPU. You may want to update it accordingly when using more than 1 GPU.
For multilingual models, we prepend target language ID token as target BOS, which should be excluded from
the training loss via `--ignore-prefix-size 1`.
#### Inference & Evaluation
Average the last 10 checkpoints and evaluate on the `test` split:
```bash
CHECKPOINT_FILENAME=avg_last_10_checkpoint.pt
python scripts/average_checkpoints.py \
--inputs ${ST_SAVE_DIR} --num-epoch-checkpoints 10 \
--output "${ST_SAVE_DIR}/${CHECKPOINT_FILENAME}"
fairseq-generate ${MTEDX_ROOT}/es-en \
--config-yaml config_st.yaml --gen-subset test --task speech_to_text \
--path ${ST_SAVE_DIR}/${CHECKPOINT_FILENAME} \
--max-tokens 50000 --beam 5 --scoring sacrebleu --remove-bpe
# For multilingual models
python scripts/average_checkpoints.py \
--inputs ${MULTILINGUAL_ST_SAVE_DIR} --num-epoch-checkpoints 10 \
--output "${MULTILINGUAL_ST_SAVE_DIR}/${CHECKPOINT_FILENAME}"
for LANGPAIR in es-en es-fr es-pt fr-en fr-es fr-pt pt-en pt-es it-en it-es ru-en el-en; do
fairseq-generate ${MTEDX_ROOT} \
--config-yaml config_st.yaml --gen-subset test_${LANGPAIR}_st --task speech_to_text \
--prefix-size 1 --path ${MULTILINGUAL_ST_SAVE_DIR}/${CHECKPOINT_FILENAME} \
--max-tokens 40000 --beam 5 \
--skip-invalid-size-inputs-valid-test \
--scoring sacrebleu --remove-bpe
done
```
For multilingual models, we force decoding from the target language ID token (as BOS) via `--prefix-size 1`.
#### Results
| Data | --arch | Params | Es-En | Es-Pt | Es-Fr | Fr-En | Fr-Es | Fr-Pt | Pt-En | Pt-Es | It-En | It-Es | Ru-En | El-En |
|--------------|--------------------|-----|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|
| Bilingual | s2t_transformer_xs | 10M | 7.0 | 12.2 | 1.7 | 8.9 | 10.6 | 7.9 | 8.1 | 8.7 | 6.4 | 1.0 | 0.7 | 0.6 |
| Multilingual | s2t_transformer_s | 31M | 12.3 | 17.4 | 6.1 | 12.0 | 13.6 | 13.2 | 12.0 | 13.7 | 10.7 | 13.1 | 0.6 | 0.8 |
## Citation
Please cite as:
```
@misc{salesky2021mtedx,
title={Multilingual TEDx Corpus for Speech Recognition and Translation},
author={Elizabeth Salesky and Matthew Wiesner and Jacob Bremerman and Roldano Cattoni and Matteo Negri and Marco Turchi and Douglas W. Oard and Matt Post},
year={2021},
}
@inproceedings{wang2020fairseqs2t,
title = {fairseq S2T: Fast Speech-to-Text Modeling with fairseq},
author = {Changhan Wang and Yun Tang and Xutai Ma and Anne Wu and Dmytro Okhonko and Juan Pino},
booktitle = {Proceedings of the 2020 Conference of the Asian Chapter of the Association for Computational Linguistics (AACL): System Demonstrations},
year = {2020},
}
@inproceedings{ott2019fairseq,
title = {fairseq: A Fast, Extensible Toolkit for Sequence Modeling},
author = {Myle Ott and Sergey Edunov and Alexei Baevski and Angela Fan and Sam Gross and Nathan Ng and David Grangier and Michael Auli},
booktitle = {Proceedings of NAACL-HLT 2019: Demonstrations},
year = {2019},
}
```
[[Back]](..)
===== COCO-LM/fairseq/examples/speech_to_text/docs/mtedx_example.md =====
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Scoring script for computing pairwise BLEU and multi-ref BLEU over a set of
candidate hypotheses.
See `"Mixture Models for Diverse Machine Translation: Tricks of the Trade"
(Shen et al., 2019) <https://arxiv.org/abs/1902.07816>`_.
"""
import argparse
import random
import sys
from itertools import chain
import numpy as np
from sacrebleu import compute_bleu, corpus_bleu as _corpus_bleu
def main():
parser = argparse.ArgumentParser(sys.argv[0])
parser.add_argument(
"--sys", nargs="*", default="", metavar="FILE", help="path to system output"
)
parser.add_argument("--ref", default="", metavar="FILE", help="path to references")
parser.add_argument(
"--output",
default="",
metavar="FILE",
help="print outputs into a pretty format",
)
args = parser.parse_args()
if args.sys:
src, tgt, hypos, log_probs = load_sys(args.sys)
print("pairwise BLEU: %.2f" % pairwise(hypos))
if args.output:
merge(src, tgt, hypos, log_probs, args.output)
if args.ref:
_, _, refs = load_ref(args.ref)
if args.sys:
multi_ref(refs, hypos)
else:
intra_ref(refs)
def dictolist(d):
a = sorted(d.items(), key=lambda i: i[0])
return [i[1] for i in a]
def load_sys(paths):
src, tgt, hypos, log_probs = {}, {}, {}, {}
for path in paths:
with open(path) as f:
for line in f:
line = line.rstrip()
# S: source
# T: target
# D: detokenized system output
if line.startswith(("S-", "T-", "D-")):
i = int(line[line.find("-") + 1 : line.find("\t")])
if line.startswith("S-"):
src[i] = line.split("\t")[1]
if line.startswith("T-"):
tgt[i] = line.split("\t")[1]
if line.startswith("D-"):
if i not in hypos:
hypos[i] = []
log_probs[i] = []
hypos[i].append(line.split("\t")[2])
log_probs[i].append(float(line.split("\t")[1]))
return dictolist(src), dictolist(tgt), dictolist(hypos), dictolist(log_probs)
def load_ref(path):
with open(path) as f:
lines = f.readlines()
src, tgt, refs = [], [], []
i = 0
while i < len(lines):
if lines[i].startswith("S-"):
src.append(lines[i].split("\t")[1].rstrip())
i += 1
elif lines[i].startswith("T-"):
tgt.append(lines[i].split("\t")[1].rstrip())
i += 1
else:
a = []
while i < len(lines) and lines[i].startswith("R"):
a.append(lines[i].split("\t")[1].rstrip())
i += 1
refs.append(a)
return src, tgt, refs
def merge(src, tgt, hypos, log_probs, path):
with open(path, "w") as f:
for s, t, hs, lps in zip(src, tgt, hypos, log_probs):
f.write(s + "\n")
f.write(t + "\n")
f.write("\n")
for h, lp in zip(hs, lps):
f.write("\t%f\t%s\n" % (lp, h.strip()))
f.write("------------------------------------------------------\n")
def corpus_bleu(sys_stream, ref_streams):
bleu = _corpus_bleu(sys_stream, ref_streams, tokenize="none")
return bleu.score
def sentence_bleu(hypothesis, reference):
bleu = _corpus_bleu(hypothesis, reference)
for i in range(1, 4):
bleu.counts[i] += 1
bleu.totals[i] += 1
bleu = compute_bleu(
bleu.counts,
bleu.totals,
bleu.sys_len,
bleu.ref_len,
smooth_method="exp",
)
return bleu.score
def pairwise(sents):
_ref, _hypo = [], []
for s in sents:
for i in range(len(s)):
for j in range(len(s)):
if i != j:
_ref.append(s[i])
_hypo.append(s[j])
return corpus_bleu(_hypo, [_ref])
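# Note on pairwise(): every ordered pair (s[i], s[j]), i != j, of a sentence's
# hypotheses is scored as a (system, reference) pair in a single corpus-level
# BLEU call -- k hypotheses per sentence yield k*(k-1) pairs. Lower pairwise
# BLEU indicates a more diverse hypothesis set.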
def multi_ref(refs, hypos):
_ref, _hypo = [], []
ref_cnt = 0
assert len(refs) == len(hypos)
# count number of refs covered
for rs, hs in zip(refs, hypos):
a = set()
for h in hs:
s = [sentence_bleu(h, r) for r in rs]
j = np.argmax(s)
_ref.append(rs[j])
_hypo.append(h)
best = [k for k in range(len(rs)) if s[k] == s[j]]
a.add(random.choice(best))
ref_cnt += len(a)
print("#refs covered: %.2f" % (ref_cnt / len(refs)))
# transpose refs and hypos
refs = list(zip(*refs))
hypos = list(zip(*hypos))
# compute multi-ref corpus BLEU (leave-one-out to be comparable to intra_ref)
k = len(hypos)
m = len(refs)
flat_hypos = [hypos[j][i] for i in range(len(hypos[0])) for j in range(k)]
duplicated_refs = [[ref for ref in refs_i for _ in range(k)] for refs_i in refs]
loo_bleus = []
for held_out_ref in range(m):
remaining_refs = (
duplicated_refs[:held_out_ref] + duplicated_refs[held_out_ref + 1 :]
)
assert len(remaining_refs) == m - 1
loo_bleus.append(corpus_bleu(flat_hypos, remaining_refs))
print("average multi-reference BLEU (leave-one-out): %.2f" % np.mean(loo_bleus))
def intra_ref(refs):
print("ref pairwise BLEU: %.2f" % pairwise(refs))
refs = list(zip(*refs))
m = len(refs)
concat_h = []
concat_rest = [[] for j in range(m - 1)]
for i, h in enumerate(refs):
rest = refs[:i] + refs[i + 1 :]
concat_h.append(h)
for j in range(m - 1):
concat_rest[j].extend(rest[j])
concat_h = list(chain.from_iterable(concat_h))
bleu = corpus_bleu(concat_h, concat_rest)
print("multi-reference BLEU (leave-one-out): %.2f" % bleu)
if __name__ == "__main__":
main()
===== COCO-LM/fairseq/examples/translation_moe/score.py =====
# WMT 19
This page provides pointers to the models of Facebook-FAIR's WMT'19 news translation task submission [(Ng et al., 2019)](https://arxiv.org/abs/1907.06616).
## Pre-trained models
Model | Description | Download
---|---|---
`transformer.wmt19.en-de` | En->De Ensemble | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.joined-dict.ensemble.tar.gz)
`transformer.wmt19.de-en` | De->En Ensemble | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.joined-dict.ensemble.tar.gz)
`transformer.wmt19.en-ru` | En->Ru Ensemble | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.ensemble.tar.gz)
`transformer.wmt19.ru-en` | Ru->En Ensemble | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.ensemble.tar.gz)
`transformer_lm.wmt19.en` | En Language Model | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/lm/wmt19.en.tar.gz)
`transformer_lm.wmt19.de` | De Language Model | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/lm/wmt19.de.tar.gz)
`transformer_lm.wmt19.ru` | Ru Language Model | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/lm/wmt19.ru.tar.gz)
## Pre-trained single models before finetuning
Model | Description | Download
---|---|---
`transformer.wmt19.en-de` | En->De Single, no finetuning | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.ffn8192.tar.gz)
`transformer.wmt19.de-en` | De->En Single, no finetuning | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.ffn8192.tar.gz)
`transformer.wmt19.en-ru` | En->Ru Single, no finetuning | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.ffn8192.tar.gz)
`transformer.wmt19.ru-en` | Ru->En Single, no finetuning | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.ffn8192.tar.gz)
## Example usage (torch.hub)
#### Requirements
We require a few additional Python dependencies for preprocessing:
```bash
pip install fastBPE sacremoses
```
#### Translation
```python
import torch
# English to German translation
en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt',
tokenizer='moses', bpe='fastbpe')
en2de.translate("Machine learning is great!") # 'Maschinelles Lernen ist großartig!'
# German to English translation
de2en = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.de-en', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt',
tokenizer='moses', bpe='fastbpe')
de2en.translate("Maschinelles Lernen ist großartig!") # 'Machine learning is great!'
# English to Russian translation
en2ru = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-ru', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt',
tokenizer='moses', bpe='fastbpe')
en2ru.translate("Machine learning is great!") # 'Машинное обучение - это здорово!'
# Russian to English translation
ru2en = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.ru-en', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt',
tokenizer='moses', bpe='fastbpe')
ru2en.translate("Машинное обучение - это здорово!") # 'Machine learning is great!'
```
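Generation options can be passed through as well; a small sketch, assuming the standard fairseq hub `translate` signature (it accepts a list of sentences and a `beam` keyword):
```python
# Batched translation with a wider beam (sketch; the kwargs assume the
# standard fairseq hub interface)
outputs = en2de.translate(
    ["Machine learning is great!", "The weather is nice today."],
    beam=10,
)
print(outputs)  # list of German translations
```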
#### Language Modeling
```python
# Sample from the English LM
en_lm = torch.hub.load('pytorch/fairseq', 'transformer_lm.wmt19.en', tokenizer='moses', bpe='fastbpe')
en_lm.sample("Machine learning is") # 'Machine learning is the future of computing, says Microsoft boss Satya Nadella ...'
# Sample from the German LM
de_lm = torch.hub.load('pytorch/fairseq', 'transformer_lm.wmt19.de', tokenizer='moses', bpe='fastbpe')
de_lm.sample("Maschinelles lernen ist") # 'Maschinelles lernen ist das A und O (neues-deutschland.de) Die Arbeitsbedingungen für Lehrerinnen und Lehrer sind seit Jahren verbesserungswürdig ...'
# Sample from the Russian LM
ru_lm = torch.hub.load('pytorch/fairseq', 'transformer_lm.wmt19.ru', tokenizer='moses', bpe='fastbpe')
ru_lm.sample("машинное обучение это") # 'машинное обучение это то, что мы называем "искусственным интеллектом".'
```
## Citation
```bibtex
@inproceedings{ng2019facebook,
title = {Facebook FAIR's WMT19 News Translation Task Submission},
author = {Ng, Nathan and Yee, Kyra and Baevski, Alexei and Ott, Myle and Auli, Michael and Edunov, Sergey},
booktitle = {Proc. of WMT},
year = 2019,
}
```
===== COCO-LM/fairseq/examples/wmt19/README.md =====
/**
* Copyright 2017-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*/
/*
This code is partially adopted from https://github.com/1ytic/pytorch-edit-distance
*/
#include "edit_dist.h"
#include <torch/types.h>
#ifndef TORCH_CHECK
#define TORCH_CHECK AT_CHECK
#endif
#define CHECK_CUDA(x) TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor")
#define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be contiguous")
#define CHECK_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x)
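// The three macros above guard the Python/C++ boundary: every tensor handed
// to the CUDA kernels must live on the GPU and be contiguous in memory, and
// a failed check raises a Python-visible error instead of reading garbage.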
torch::Tensor LevenshteinDistance(
torch::Tensor source,
torch::Tensor target,
torch::Tensor source_length,
torch::Tensor target_length) {
CHECK_INPUT(source);
CHECK_INPUT(target);
CHECK_INPUT(source_length);
CHECK_INPUT(target_length);
return LevenshteinDistanceCuda(source, target, source_length, target_length);
}
torch::Tensor GenerateDeletionLabel(
torch::Tensor source,
torch::Tensor operations) {
CHECK_INPUT(source);
CHECK_INPUT(operations);
return GenerateDeletionLabelCuda(source, operations);
}
std::pair<torch::Tensor, torch::Tensor> GenerateInsertionLabel(
torch::Tensor target,
torch::Tensor operations) {
CHECK_INPUT(target);
CHECK_INPUT(operations);
return GenerateInsertionLabelCuda(target, operations);
}
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
m.def("levenshtein_distance", &LevenshteinDistance, "Levenshtein distance");
m.def("generate_deletion_labels", &GenerateDeletionLabel, "Generate Deletion Label");
m.def("generate_insertion_labels", &GenerateInsertionLabel, "Generate Insertion Label");
}
===== COCO-LM/fairseq/fairseq/clib/libnat_cuda/binding.cpp =====
# @package _group_
quantize_targets: true
extractor_mode: layer_norm
layer_norm_first: true
final_dim: 768
latent_temp: [2.0,0.1,0.999995]
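# latent_temp is the Gumbel-softmax temperature schedule for the quantizer:
# (start, end, decay) -- the temperature anneals from 2.0 toward 0.1,
# multiplied by 0.999995 every update.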
encoder_layerdrop: 0.0
dropout_input: 0.0
dropout_features: 0.0
dropout: 0.0
attention_dropout: 0.0
conv_bias: true
encoder_layers: 24
encoder_embed_dim: 1024
encoder_ffn_embed_dim: 4096
encoder_attention_heads: 16
feature_grad_mult: 1.0
===== COCO-LM/fairseq/fairseq/config/model/wav2vec2/wav2vec2_large.yaml =====
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import math
import os
import torch
import torch.nn.functional as F
from fairseq import metrics, utils
from fairseq.criterions import FairseqCriterion, register_criterion
from fairseq.data.squad import SquadResult, compute_predictions_logits, squad_evaluate
@register_criterion('squad')
class SquadCriterion(FairseqCriterion):
def __init__(self, task):
super().__init__(task)
self.head_name = 'question_answering_head'
@staticmethod
def add_args(parser):
# fmt: off
parser.add_argument('--n-best-size', default=20, type=int,
help="The number of n-best predictions")
parser.add_argument('--max-answer-length', default=30, type=int,
help="The maximum length of the generated answer")
parser.add_argument('--version-2-with-negative', action='store_true')
def forward(self, model, sample, reduce=True):
features, _ = model(
**sample['net_input'],
features_only=True,
classification_head_name=None,
)
p_mask = sample['targets']['p_mask']
if self.training:
start_positions = sample['targets']['starts']
end_positions = sample['targets']['ends']
loss = model.classification_heads[self.head_name].forward(features, start_positions, end_positions, p_mask)
else:
loss = torch.zeros(1, dtype=torch.float, device=features.device, requires_grad=True)
outputs = model.classification_heads[self.head_name].forward(features, p_mask=p_mask)
sample_size = sample['nsentences']
logging_output = {
'loss': utils.item(loss.data) if reduce else loss.data,
'ntokens': sample['ntokens'],
'nsentences': sample['nsentences'],
'sample_size': sample_size,
}
if not self.training:
logging_output['start_logits'] = outputs[0].detach()
logging_output['end_logits'] = outputs[1].detach()
logging_output['index'] = sample['id']
return loss, sample_size, logging_output
@staticmethod
def reduce_metrics(logging_outputs):
loss = sum(log.get('loss', 0) for log in logging_outputs)
ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)
nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)
sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)
metrics.log_scalar('loss', loss / sample_size / math.log(2))
metrics.log_scalar('ntokens', ntokens)
metrics.log_scalar('nsentences', nsentences)
metrics.log_scalar('sample_size', sample_size)
def context_metrics(self, logging_outputs):
if self.training:
return
all_results = []
task = self.task
for log in logging_outputs:
start_logits = log['start_logits']
end_logits = log['end_logits']
indices = log['index']
for i in range(start_logits.size(0)):
index = int(indices[i])
unique_id = task.eval_features[index].unique_id
result = SquadResult(unique_id,
start_logits[i].float().cpu().tolist(),
end_logits[i].float().cpu().tolist(),
)
all_results.append(result)
output_prediction_file = os.path.join(self.args.save_dir, "predictions.json")
output_nbest_file = os.path.join(self.args.save_dir, "nbest_predictions.json")
        if self.args.version_2_with_negative:
            output_null_log_odds_file = os.path.join(self.args.save_dir, "null_odds.json")
        else:
            output_null_log_odds_file = None
predictions, null_scores = compute_predictions_logits(
task.eval_examples,
task.eval_features,
all_results,
n_best_size=self.args.n_best_size,
max_answer_length=self.args.max_answer_length,
do_lower_case=False,
output_prediction_file=output_prediction_file,
output_nbest_file=output_nbest_file,
output_null_log_odds_file=output_null_log_odds_file,
verbose_logging=False,
version_2_with_negative=self.args.version_2_with_negative,
null_score_diff_threshold=0.0,
tokenizer=task.tokenizer,
)
# TODO: implement xlnet's beam search solution
# predictions = compute_predictions_log_probs(
# task.eval_examples,
# task.eval_features,
# all_results,
# n_best_size=self.args.n_best_size,
# max_answer_length=self.args.max_answer_length,
# output_prediction_file=output_prediction_file,
# output_nbest_file=output_nbest_file,
# output_null_log_odds_file=output_null_log_odds_file,
# start_n_top=self.args.start_n_top,
# end_n_top=self.args.end_n_top,
# version_2_with_negative=self.args.version_2_with_negative,
# tokenizer=task.tokenizer,
# verbose_logging=False,
# )
eval_result = squad_evaluate(task.eval_examples, predictions, null_scores)
print(eval_result)
===== COCO-LM/fairseq/fairseq/criterions/squad_criterion.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
from . import BaseWrapperDataset
class ColorizeDataset(BaseWrapperDataset):
""" Adds 'colors' property to net input that is obtained from the provided color getter for use by models """
def __init__(self, dataset, color_getter):
super().__init__(dataset)
self.color_getter = color_getter
def collater(self, samples):
base_collate = super().collater(samples)
if len(base_collate) > 0:
base_collate["net_input"]["colors"] = torch.tensor(
list(self.color_getter(self.dataset, s["id"]) for s in samples),
dtype=torch.long,
)
return base_collate
===== COCO-LM/fairseq/fairseq/data/colorize_dataset.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass, field
from typing import Optional
from fairseq.data.encoders import register_bpe
from fairseq.dataclass import FairseqDataclass
@dataclass
class BertBPEConfig(FairseqDataclass):
bpe_cased: bool = field(default=False, metadata={"help": "set for cased BPE"})
bpe_vocab_file: Optional[str] = field(
default=None, metadata={"help": "bpe vocab file"}
)
@register_bpe("bert", dataclass=BertBPEConfig)
class BertBPE(object):
def __init__(self, cfg):
try:
from transformers import BertTokenizer
except ImportError:
raise ImportError(
"Please install transformers with: pip install transformers"
)
if cfg.bpe_vocab_file:
self.bert_tokenizer = BertTokenizer(
cfg.bpe_vocab_file, do_lower_case=not cfg.bpe_cased
)
else:
vocab_file_name = (
"bert-base-cased" if cfg.bpe_cased else "bert-base-uncased"
)
self.bert_tokenizer = BertTokenizer.from_pretrained(vocab_file_name)
def encode(self, x: str) -> str:
return " ".join(self.bert_tokenizer.tokenize(x))
def decode(self, x: str) -> str:
return self.bert_tokenizer.clean_up_tokenization(
self.bert_tokenizer.convert_tokens_to_string(x.split(" "))
)
def is_beginning_of_word(self, x: str) -> bool:
return not x.startswith("##")
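# Round-trip sketch (assumption: first use downloads the "bert-base-uncased"
# vocabulary, so network access is required):
#   bpe = BertBPE(BertBPEConfig(bpe_cased=False, bpe_vocab_file=None))
#   enc = bpe.encode("Tokenization example")  # e.g. "token ##ization example"
#   bpe.decode(enc)                           # -> "tokenization example"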
===== COCO-LM/fairseq/fairseq/data/encoders/hf_bert_bpe.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from typing import Dict, List, Tuple
import numpy as np
import torch
from fairseq.data import Dictionary, FairseqDataset, data_utils
from fairseq.data.concat_dataset import ConcatDataset
from fairseq.data.legacy.block_pair_dataset import BlockPairDataset
from fairseq.data.token_block_dataset import TokenBlockDataset
class MaskedLMDataset(FairseqDataset):
"""
A wrapper Dataset for masked language modelling. The dataset
wraps around TokenBlockDataset or BlockedPairDataset and creates a batch
where the input blocks are masked according to the specified masking
probability. Additionally the batch can also contain sentence level targets
if this is specified.
Args:
dataset: Dataset which generates blocks of data. Only BlockPairDataset
and TokenBlockDataset are supported.
sizes: Sentence lengths
vocab: Dictionary with the vocabulary and special tokens.
pad_idx: Id of padding token in dictionary
mask_idx: Id of mask token in dictionary
classif_token_idx: Id of classification token in dictionary. This is the
token associated with the sentence embedding (Eg: CLS for BERT)
sep_token_idx: Id of separator token in dictionary
(Eg: SEP in BERT)
seed: Seed for random number generator for reproducibility.
shuffle: Shuffle the elements before batching.
has_pairs: Specifies whether the underlying dataset
generates a pair of blocks along with a sentence_target or not.
Setting it to True assumes that the underlying dataset generates a
label for the pair of sentences which is surfaced as
sentence_target. The default value assumes a single block with no
sentence target.
segment_id: An optional segment id for filling in the segment labels
when we are in the single block setting (Eg: XLM). Default is 0.
masking_ratio: specifies what percentage of the blocks should be masked.
masking_prob: specifies the probability of a given token being
replaced with the "MASK" token.
random_token_prob: specifies the probability of a given token being
replaced by a random token from the vocabulary.
"""
def __init__(
self,
dataset: FairseqDataset,
sizes: np.ndarray,
vocab: Dictionary,
pad_idx: int,
mask_idx: int,
classif_token_idx: int,
sep_token_idx: int,
seed: int = 1,
shuffle: bool = True,
has_pairs: bool = True,
segment_id: int = 0,
masking_ratio: float = 0.15,
masking_prob: float = 0.8,
random_token_prob: float = 0.1,
):
# Make sure the input datasets are the ones supported
assert (
isinstance(dataset, TokenBlockDataset)
or isinstance(dataset, BlockPairDataset)
or isinstance(dataset, ConcatDataset)
), (
"MaskedLMDataset only wraps TokenBlockDataset or BlockPairDataset or "
"ConcatDataset"
)
self.dataset = dataset
self.sizes = np.array(sizes)
self.vocab = vocab
self.pad_idx = pad_idx
self.mask_idx = mask_idx
self.classif_token_idx = classif_token_idx
self.sep_token_idx = sep_token_idx
self.shuffle = shuffle
self.seed = seed
self.has_pairs = has_pairs
self.segment_id = segment_id
self.masking_ratio = masking_ratio
self.masking_prob = masking_prob
self.random_token_prob = random_token_prob
# If we have only one block then sizes needs to be updated to include
# the classification token
if not has_pairs:
self.sizes = self.sizes + 1
def __getitem__(self, index: int):
# if has_pairs, then expect 2 blocks and a sentence target
if self.has_pairs:
(block_one, block_two, sentence_target) = self.dataset[index]
else:
block_one = self.dataset[index]
return {
"id": index,
"block_one": block_one,
"block_two": block_two if self.has_pairs else None,
"sentence_target": sentence_target if self.has_pairs else None,
}
def __len__(self):
return len(self.dataset)
def _mask_block(
self,
sentence: np.ndarray,
mask_idx: int,
pad_idx: int,
dictionary_token_range: Tuple,
):
"""
Mask tokens for Masked Language Model training
Samples mask_ratio tokens that will be predicted by LM.
        Note: this function may not be efficient since it performs multiple
        conversions between np and torch; these could be replaced with torch
        operators later.
Args:
sentence: 1d tensor to be masked
mask_idx: index to use for masking the sentence
pad_idx: index to use for masking the target for tokens we aren't
predicting
dictionary_token_range: range of indices in dictionary which can
be used for random word replacement
(e.g. without special characters)
Return:
masked_sent: masked sentence
target: target with words which we are not predicting replaced
by pad_idx
"""
masked_sent = np.copy(sentence)
sent_length = len(sentence)
mask_num = math.ceil(sent_length * self.masking_ratio)
mask = np.random.choice(sent_length, mask_num, replace=False)
target = np.copy(sentence)
for i in range(sent_length):
if i in mask:
rand = np.random.random()
# replace with mask if probability is less than masking_prob
# (Eg: 0.8)
if rand < self.masking_prob:
masked_sent[i] = mask_idx
# replace with random token if probability is less than
# masking_prob + random_token_prob (Eg: 0.9)
elif rand < (self.masking_prob + self.random_token_prob):
# sample random token from dictionary
masked_sent[i] = np.random.randint(
dictionary_token_range[0], dictionary_token_range[1]
)
else:
target[i] = pad_idx
return masked_sent, target
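    # Concretely, with the defaults (masking_ratio=0.15, masking_prob=0.8,
    # random_token_prob=0.1): 15% of positions are selected for prediction;
    # of those, 80% become the mask token, 10% become a random vocabulary
    # token, and the remaining 10% keep the original token. Unselected
    # positions get target pad_idx so the LM loss ignores them (the BERT
    # 80/10/10 scheme).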
def _collate(self, samples: List[Dict], pad_idx: int, eos_idx: int):
"""
Does the heavy lifting for creating a batch from the input list of
examples. The logic is as follows:
1. Mask the input blocks. In case has_pair is True then we have 2
blocks to mask.
2. Prepend the first masked block tensor with the special token
used as sentence embedding. Eg: CLS in BERT. This happens
irrespective of the value of has_pair.
3. If has_pair is True, then append the first masked block with the
special separator token (eg: SEP for BERT) and compute segment
label accordingly. In this case, also append the second masked
block with this special separator token and compute its segment
label.
4. For the targets tensor, prepend and append with padding index
accordingly.
5. Concatenate all tensors.
"""
if len(samples) == 0:
return {}
# To ensure determinism, we reset the state of the PRNG after every
# batch based on the seed and the first id of the batch. This ensures
# that across epochs we get the same mask for the same example. This
# is needed for reproducibility and is how BERT does masking
        # TODO: Can we add determinism without this constraint?
with data_utils.numpy_seed(self.seed + samples[0]["id"]):
for s in samples:
# token range is needed for replacing with random token during
# masking
token_range = (self.vocab.nspecial, len(self.vocab))
# mask according to specified probabilities.
masked_blk_one, masked_tgt_one = self._mask_block(
s["block_one"],
self.mask_idx,
self.pad_idx,
token_range,
)
tokens = np.concatenate([[self.classif_token_idx], masked_blk_one])
targets = np.concatenate([[self.pad_idx], masked_tgt_one])
segments = np.ones(len(tokens)) * self.segment_id
# if has_pairs is True then we need to add the SEP token to both
# the blocks after masking and re-compute segments based on the new
# lengths.
if self.has_pairs:
tokens_one = np.concatenate([tokens, [self.sep_token_idx]])
targets_one = np.concatenate([targets, [self.pad_idx]])
masked_blk_two, masked_tgt_two = self._mask_block(
s["block_two"], self.mask_idx, self.pad_idx, token_range
)
tokens_two = np.concatenate([masked_blk_two, [self.sep_token_idx]])
targets_two = np.concatenate([masked_tgt_two, [self.pad_idx]])
# block + 1 sep + 1 special (CLS)
segments_one = np.zeros(len(tokens_one))
# block + 1 sep
segments_two = np.ones(len(tokens_two))
tokens = np.concatenate([tokens_one, tokens_two])
targets = np.concatenate([targets_one, targets_two])
segments = np.concatenate([segments_one, segments_two])
s["source"] = torch.LongTensor(tokens)
s["segment_labels"] = torch.LongTensor(segments)
s["lm_target"] = torch.LongTensor(targets)
def merge(key):
return data_utils.collate_tokens(
[s[key] for s in samples], pad_idx, eos_idx, left_pad=False
)
return {
"id": torch.LongTensor([s["id"] for s in samples]),
"ntokens": sum(len(s["source"]) for s in samples),
"net_input": {
"src_tokens": merge("source"),
"segment_labels": merge("segment_labels"),
},
"lm_target": merge("lm_target"),
"sentence_target": torch.LongTensor([s["sentence_target"] for s in samples])
if self.has_pairs
else None,
"nsentences": len(samples),
}
def collater(self, samples: List[Dict]):
"""Merge a list of samples to form a mini-batch.
Args:
samples (List[dict]): samples to collate
Returns:
dict: a mini-batch of data
"""
return self._collate(samples, self.vocab.pad(), self.vocab.eos())
def num_tokens(self, index: int):
"""
Return the number of tokens in a sample. This value is used to
enforce max-tokens during batching.
"""
return self.sizes[index]
def size(self, index: int):
"""
Return an example's size as a float or tuple. This value is used when
filtering a dataset with max-positions.
"""
return self.sizes[index]
def ordered_indices(self):
"""
Return an ordered list of indices. Batches will be constructed based
on this order.
"""
if self.shuffle:
return np.random.permutation(len(self))
else:
order = [np.arange(len(self))]
order.append(self.sizes)
return np.lexsort(order)
@property
def supports_prefetch(self):
return getattr(self.dataset, "supports_prefetch", False)
def prefetch(self, indices):
self.dataset.prefetch(indices)
===== COCO-LM/fairseq/fairseq/data/legacy/masked_lm_dataset.py =====
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
import torch
from fairseq.data import data_utils
class WordNoising(object):
"""Generate a noisy version of a sentence, without changing words themselves."""
def __init__(self, dictionary, bpe_cont_marker="@@", bpe_end_marker=None):
self.dictionary = dictionary
self.bpe_end = None
if bpe_cont_marker:
self.bpe_end = np.array(
[
not self.dictionary[i].endswith(bpe_cont_marker)
for i in range(len(self.dictionary))
]
)
elif bpe_end_marker:
self.bpe_end = np.array(
[
self.dictionary[i].endswith(bpe_end_marker)
for i in range(len(self.dictionary))
]
)
self.get_word_idx = (
self._get_bpe_word_idx if self.bpe_end is not None else self._get_token_idx
)
def noising(self, x, lengths, noising_prob=0.0):
raise NotImplementedError()
def _get_bpe_word_idx(self, x):
"""
Given a list of BPE tokens, for every index in the tokens list,
return the index of the word grouping that it belongs to.
For example, for input x corresponding to ["how", "are", "y@@", "ou"],
return [[0], [1], [2], [2]].
"""
# x: (T x B)
bpe_end = self.bpe_end[x]
if x.size(0) == 1 and x.size(1) == 1:
# Special case when we only have one word in x. If x = [[N]],
# bpe_end is a scalar (bool) instead of a 2-dim array of bools,
# which makes the sum operation below fail.
return np.array([[0]])
# do a reduce front sum to generate word ids
word_idx = bpe_end[::-1].cumsum(0)[::-1]
word_idx = word_idx.max(0)[None, :] - word_idx
return word_idx
def _get_token_idx(self, x):
"""
        This extends the noising functions so they can be applied to non-BPE
        tokens, e.g. words or characters.
"""
x = torch.t(x)
word_idx = np.array([range(len(x_i)) for x_i in x])
return np.transpose(word_idx)
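# Illustrative check (not part of fairseq) of the reduce-front-sum trick in
# `_get_bpe_word_idx` above: for tokens ["how", "are", "y@@", "ou"], bpe_end
# marks word-final tokens, and the reversed cumulative sum turns that into
# per-position word ids [0, 1, 2, 2].
def _demo_bpe_word_idx():
    bpe_end = np.array([[True], [True], [False], [True]])  # T x B, with B = 1
    word_idx = bpe_end[::-1].cumsum(0)[::-1]
    word_idx = word_idx.max(0)[None, :] - word_idx
    assert word_idx[:, 0].tolist() == [0, 1, 2, 2]
    return word_idx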
class WordDropout(WordNoising):
"""Randomly drop input words. If not passing blank_idx (default is None),
then dropped words will be removed. Otherwise, it will be replaced by the
blank_idx."""
def __init__(
self,
dictionary,
default_dropout_prob=0.1,
bpe_cont_marker="@@",
bpe_end_marker=None,
):
super().__init__(dictionary, bpe_cont_marker, bpe_end_marker)
self.default_dropout_prob = default_dropout_prob
def noising(self, x, lengths, dropout_prob=None, blank_idx=None):
if dropout_prob is None:
dropout_prob = self.default_dropout_prob
# x: (T x B), lengths: B
if dropout_prob == 0:
return x, lengths
assert 0 < dropout_prob < 1
# be sure to drop entire words
word_idx = self.get_word_idx(x)
sentences = []
modified_lengths = []
for i in range(lengths.size(0)):
            # Since dropout probabilities need to apply over non-pad tokens,
            # it is not trivial to generate the keep mask without considering
            # input lengths; otherwise, this could be done outside the loop
# We want to drop whole words based on word_idx grouping
num_words = max(word_idx[:, i]) + 1
# ith example: [x0, x1, ..., eos, pad, ..., pad]
# We should only generate keep probs for non-EOS tokens. Thus if the
# input sentence ends in EOS, the last word idx is not included in
# the dropout mask generation and we append True to always keep EOS.
# Otherwise, just generate the dropout mask for all word idx
# positions.
has_eos = x[lengths[i] - 1, i] == self.dictionary.eos()
            if has_eos:
keep = np.random.rand(num_words - 1) >= dropout_prob
keep = np.append(keep, [True]) # keep EOS symbol
else:
keep = np.random.rand(num_words) >= dropout_prob
words = x[: lengths[i], i].tolist()
# TODO: speed up the following loop
# drop words from the input according to keep
new_s = [
w if keep[word_idx[j, i]] else blank_idx for j, w in enumerate(words)
]
new_s = [w for w in new_s if w is not None]
# we need to have at least one word in the sentence (more than the
# start / end sentence symbols)
if len(new_s) <= 1:
# insert at beginning in case the only token left is EOS
# EOS should be at end of list.
new_s.insert(0, words[np.random.randint(0, len(words))])
assert len(new_s) >= 1 and (
not has_eos # Either don't have EOS at end or last token is EOS
or (len(new_s) >= 2 and new_s[-1] == self.dictionary.eos())
), "New sentence is invalid."
sentences.append(new_s)
modified_lengths.append(len(new_s))
# re-construct input
modified_lengths = torch.LongTensor(modified_lengths)
modified_x = torch.LongTensor(
modified_lengths.max(), modified_lengths.size(0)
).fill_(self.dictionary.pad())
for i in range(modified_lengths.size(0)):
modified_x[: modified_lengths[i], i].copy_(torch.LongTensor(sentences[i]))
return modified_x, modified_lengths
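# Sketch (illustrative only) of the whole-word dropout step above: one keep
# decision is drawn per *word* and broadcast to all of its subword tokens via
# word_idx, so the pieces of a word are always dropped together.
def _demo_word_dropout(word_idx=(0, 0, 1, 2, 2), dropout_prob=0.5):
    word_idx = np.asarray(word_idx)
    num_words = word_idx.max() + 1
    keep = np.random.rand(num_words) >= dropout_prob  # one draw per word
    return [j for j in range(len(word_idx)) if keep[word_idx[j]]]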
class WordShuffle(WordNoising):
"""Shuffle words by no more than k positions."""
def __init__(
self,
dictionary,
default_max_shuffle_distance=3,
bpe_cont_marker="@@",
bpe_end_marker=None,
):
super().__init__(dictionary, bpe_cont_marker, bpe_end_marker)
        self.default_max_shuffle_distance = default_max_shuffle_distance
def noising(self, x, lengths, max_shuffle_distance=None):
if max_shuffle_distance is None:
max_shuffle_distance = self.default_max_shuffle_distance
# x: (T x B), lengths: B
if max_shuffle_distance == 0:
return x, lengths
        # max_shuffle_distance <= 1 leaves the sequence unchanged, so require
        # a distance of at least 2
        assert max_shuffle_distance > 1
# define noise word scores
noise = np.random.uniform(
0,
max_shuffle_distance,
size=(x.size(0), x.size(1)),
)
noise[0] = -1 # do not move start sentence symbol
# be sure to shuffle entire words
word_idx = self.get_word_idx(x)
x2 = x.clone()
for i in range(lengths.size(0)):
length_no_eos = lengths[i]
if x[lengths[i] - 1, i] == self.dictionary.eos():
length_no_eos = lengths[i] - 1
# generate a random permutation
scores = word_idx[:length_no_eos, i] + noise[word_idx[:length_no_eos, i], i]
# ensure no reordering inside a word
scores += 1e-6 * np.arange(length_no_eos.item())
permutation = scores.argsort()
# shuffle words
x2[:length_no_eos, i].copy_(
x2[:length_no_eos, i][torch.from_numpy(permutation)]
)
return x2, lengths
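# Sketch (illustrative only) of why the shuffle above is bounded: adding
# U(0, k) noise to each position index and argsorting moves every element by
# strictly less than k positions.
def _demo_bounded_shuffle(length=10, k=3):
    idx = np.arange(length)
    scores = idx + np.random.uniform(0, k, size=length)
    permutation = scores.argsort()
    assert np.abs(permutation - idx).max() < k
    return permutation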
class UnsupervisedMTNoising(WordNoising):
"""
Implements the default configuration for noising in UnsupervisedMT
(github.com/facebookresearch/UnsupervisedMT)
"""
def __init__(
self,
dictionary,
max_word_shuffle_distance,
word_dropout_prob,
word_blanking_prob,
bpe_cont_marker="@@",
bpe_end_marker=None,
):
super().__init__(dictionary)
self.max_word_shuffle_distance = max_word_shuffle_distance
self.word_dropout_prob = word_dropout_prob
self.word_blanking_prob = word_blanking_prob
self.word_dropout = WordDropout(
dictionary=dictionary,
bpe_cont_marker=bpe_cont_marker,
bpe_end_marker=bpe_end_marker,
)
self.word_shuffle = WordShuffle(
dictionary=dictionary,
bpe_cont_marker=bpe_cont_marker,
bpe_end_marker=bpe_end_marker,
)
def noising(self, x, lengths):
# 1. Word Shuffle
noisy_src_tokens, noisy_src_lengths = self.word_shuffle.noising(
x=x,
lengths=lengths,
max_shuffle_distance=self.max_word_shuffle_distance,
)
# 2. Word Dropout
noisy_src_tokens, noisy_src_lengths = self.word_dropout.noising(
x=noisy_src_tokens,
lengths=noisy_src_lengths,
dropout_prob=self.word_dropout_prob,
)
# 3. Word Blanking
noisy_src_tokens, noisy_src_lengths = self.word_dropout.noising(
x=noisy_src_tokens,
lengths=noisy_src_lengths,
dropout_prob=self.word_blanking_prob,
blank_idx=self.dictionary.unk(),
)
return noisy_src_tokens
class NoisingDataset(torch.utils.data.Dataset):
def __init__(
self,
src_dataset,
src_dict,
seed,
noiser=None,
noising_class=UnsupervisedMTNoising,
**kwargs
):
"""
Wrap a :class:`~torch.utils.data.Dataset` and apply noise to the
samples based on the supplied noising configuration.
Args:
            src_dataset (~torch.utils.data.Dataset): dataset to wrap; used
                to build self.src_dataset, a LanguagePairDataset with the
                source dataset as the source and None as the target. It
                should NOT contain padding so that src_lengths are
                accurately calculated by the language_pair_dataset collate
                function. We use language_pair_dataset here to encapsulate
                the tgt_dataset so we can re-use the LanguagePairDataset
                collater to format the batches in the structure that
                SequenceGenerator expects.
src_dict (~fairseq.data.Dictionary): source dictionary
seed (int): seed to use when generating random noise
noiser (WordNoising): a pre-initialized :class:`WordNoising`
instance. If this is None, a new instance will be created using
*noising_class* and *kwargs*.
noising_class (class, optional): class to use to initialize a
default :class:`WordNoising` instance.
            kwargs (dict, optional): arguments used to initialize the default
                :class:`WordNoising` instance created via *noising_class*.
"""
self.src_dataset = src_dataset
self.src_dict = src_dict
self.seed = seed
self.noiser = (
noiser
if noiser is not None
else noising_class(
dictionary=src_dict,
**kwargs,
)
)
def __getitem__(self, index):
"""
        Returns a single noisy sample. Multiple samples are fed to the collater
        to create a noising dataset batch.
"""
src_tokens = self.src_dataset[index]
src_lengths = torch.LongTensor([len(src_tokens)])
src_tokens = src_tokens.unsqueeze(0)
# Transpose src tokens to fit expected shape of x in noising function
# (batch size, sequence length) -> (sequence length, batch size)
src_tokens_t = torch.t(src_tokens)
with data_utils.numpy_seed(self.seed + index):
noisy_src_tokens = self.noiser.noising(src_tokens_t, src_lengths)
# Transpose back to expected src_tokens format
# (sequence length, 1) -> (1, sequence length)
noisy_src_tokens = torch.t(noisy_src_tokens)
return noisy_src_tokens[0]
def __len__(self):
"""
The length of the noising dataset is the length of src.
"""
return len(self.src_dataset)
@property
def supports_prefetch(self):
return self.src_dataset.supports_prefetch
def prefetch(self, indices):
if self.src_dataset.supports_prefetch:
self.src_dataset.prefetch(indices)
COCO-LM/fairseq/fairseq/data/noising.py/0
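A hedged usage sketch for noising.py above; the toy dictionary and one-sentence "dataset" are illustrative, and in practice a fairseq Dictionary loaded from disk would be used:

import torch
from fairseq.data import Dictionary
from fairseq.data.noising import NoisingDataset, UnsupervisedMTNoising

d = Dictionary()
ids = [d.add_symbol(w) for w in ["he@@", "llo", "world"]]
src_dataset = [torch.LongTensor(ids + [d.eos()])]  # toy stand-in dataset

noisy = NoisingDataset(
    src_dataset,
    d,
    seed=0,
    noising_class=UnsupervisedMTNoising,
    max_word_shuffle_distance=3,
    word_dropout_prob=0.1,
    word_blanking_prob=0.1,
)
print(noisy[0])  # same index + same seed -> the same noise on every call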
from .squad_extractor import SquadExample, SquadFeature, read_squad_examples, squad_convert_examples_to_features
from .basic_tokenizer import BasicTokenizer
from .squad_metrics import SquadResult, compute_predictions_logits, squad_evaluate
COCO-LM/fairseq/fairseq/data/squad/__init__.py/0
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from .distributed_timeout_wrapper import DistributedTimeoutWrapper
from .fully_sharded_data_parallel import fsdp_enable_wrap, fsdp_wrap, FullyShardedDataParallel
from .legacy_distributed_data_parallel import LegacyDistributedDataParallel
from .module_proxy_wrapper import ModuleProxyWrapper
from .tpu_distributed_data_parallel import TPUDistributedDataParallel
__all__ = [
"DistributedTimeoutWrapper",
"fsdp_enable_wrap",
"fsdp_wrap",
"FullyShardedDataParallel",
"LegacyDistributedDataParallel",
"ModuleProxyWrapper",
"TPUDistributedDataParallel",
]
COCO-LM/fairseq/fairseq/distributed/__init__.py/0
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.models import (
FairseqEncoder,
FairseqEncoderDecoderModel,
FairseqIncrementalDecoder,
register_model,
register_model_architecture,
)
from fairseq.modules import (
AdaptiveSoftmax,
DynamicConv,
FairseqDropout,
LayerNorm,
LightweightConv,
MultiheadAttention,
PositionalEmbedding,
)
@register_model("lightconv")
class LightConvModel(FairseqEncoderDecoderModel):
"""
    LightConv and DynamicConv model from `"Pay Less Attention with Lightweight and Dynamic Convolutions" (Wu et al., 2019)
<https://openreview.net/pdf?id=SkVhlh09tX>`_.
To use LightConv please set ``--encoder-conv-type lightweight --decoder-conv-type lightweight``
To use DynamicConv please set ``--encoder-conv-type dynamic --decoder-conv-type dynamic``
Args:
encoder (LightConvEncoder): the encoder
decoder (LightConvDecoder): the decoder
The LightConv model provides the following named architectures and
command-line arguments:
.. argparse::
:ref: fairseq.models.lightconv_parser
:prog:
"""
@classmethod
def hub_models(cls):
# fmt: off
def moses_subword(path):
return {
'path': path,
'tokenizer': 'moses',
'bpe': 'subword_nmt',
}
return {
'lightconv.no_glu.iwslt14.de-en': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/iwslt14.de-en.lightconv.tar.gz'),
'dynamicconv.no_glu.iwslt14.de-en': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/iwslt14.de-en.dynamicconv.tar.gz'),
'lightconv.no_glu.wmt16.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.lightconv.tar.gz'),
'dynamicconv.no_glu.wmt16.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.dynamicconv.tar.gz'),
'lightconv.glu.wmt16.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.lightconv-glu.tar.gz'),
'dynamicconv.glu.wmt16.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.dynamicconv-glu.tar.gz'),
'lightconv.glu.wmt17.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.lightconv-glu.tar.gz'),
'dynamicconv.glu.wmt17.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt16.en-de.joined-dict.dynamicconv-glu.tar.gz'),
'lightconv.glu.wmt14.en-fr': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt14.en-fr.joined-dict.lightconv-glu.tar.gz'),
'dynamicconv.glu.wmt14.en-fr': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt14.en-fr.joined-dict.dynamicconv-glu.tar.gz'),
'lightconv.glu.wmt17.zh-en': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt17.zh-en.lightconv-glu.tar.gz'),
'dynamicconv.glu.wmt17.zh-en': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/dynamicconv/wmt17.zh-en.dynamicconv-glu.tar.gz'),
}
# fmt: on
def __init__(self, encoder, decoder):
super().__init__(encoder, decoder)
@staticmethod
def add_args(parser):
"""Add model-specific arguments to the parser."""
parser.add_argument(
"--dropout", type=float, metavar="D", help="dropout probability"
)
parser.add_argument(
"--attention-dropout",
type=float,
metavar="D",
help="dropout probability for attention weights",
)
parser.add_argument(
"--relu-dropout",
type=float,
metavar="D",
help="dropout probability after ReLU in FFN",
)
parser.add_argument(
"--input-dropout",
type=float,
metavar="D",
help="dropout probability of the inputs",
)
parser.add_argument(
"--encoder-embed-path",
type=str,
metavar="STR",
help="path to pre-trained encoder embedding",
)
parser.add_argument(
"--encoder-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension",
)
parser.add_argument(
"--encoder-conv-dim",
type=int,
metavar="N",
help="encoder embedding dimension",
)
parser.add_argument(
"--encoder-ffn-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension for FFN",
)
parser.add_argument(
"--encoder-layers", type=int, metavar="N", help="num encoder layers"
)
parser.add_argument(
"--encoder-attention-heads",
type=int,
metavar="N",
help="num encoder attention heads or LightConv/DynamicConv heads",
)
parser.add_argument(
"--encoder-normalize-before",
action="store_true",
help="apply layernorm before each encoder block",
)
parser.add_argument(
"--encoder-learned-pos",
action="store_true",
help="use learned positional embeddings in the encoder",
)
parser.add_argument(
"--decoder-embed-path",
type=str,
metavar="STR",
help="path to pre-trained decoder embedding",
)
parser.add_argument(
"--decoder-embed-dim",
type=int,
metavar="N",
help="decoder embedding dimension",
)
parser.add_argument(
"--decoder-conv-dim",
type=int,
metavar="N",
help="decoder embedding dimension",
)
parser.add_argument(
"--decoder-ffn-embed-dim",
type=int,
metavar="N",
help="decoder embedding dimension for FFN",
)
parser.add_argument(
"--decoder-layers", type=int, metavar="N", help="num decoder layers"
)
parser.add_argument(
"--decoder-attention-heads",
type=int,
metavar="N",
help="num decoder attention heads or LightConv/DynamicConv heads",
)
parser.add_argument(
"--decoder-learned-pos",
action="store_true",
help="use learned positional embeddings in the decoder",
)
parser.add_argument(
"--decoder-normalize-before",
action="store_true",
help="apply layernorm before each decoder block",
)
parser.add_argument(
"--share-decoder-input-output-embed",
action="store_true",
help="share decoder input and output embeddings",
)
parser.add_argument(
"--share-all-embeddings",
action="store_true",
help="share encoder, decoder and output embeddings"
" (requires shared dictionary and embed dim)",
)
parser.add_argument(
"--adaptive-softmax-cutoff",
metavar="EXPR",
help="comma separated list of adaptive softmax cutoff points. "
"Must be used with adaptive_loss criterion",
        )
parser.add_argument(
"--adaptive-softmax-dropout",
type=float,
metavar="D",
help="sets adaptive softmax dropout for the tail projections",
)
"""LightConv and DynamicConv arguments"""
parser.add_argument(
"--encoder-kernel-size-list",
type=lambda x: utils.eval_str_list(x, int),
help='list of kernel size (default: "[3,7,15,31,31,31,31]")',
)
parser.add_argument(
"--decoder-kernel-size-list",
type=lambda x: utils.eval_str_list(x, int),
help='list of kernel size (default: "[3,7,15,31,31,31]")',
)
parser.add_argument(
"--encoder-glu", type=utils.eval_bool, help="glu after in proj"
)
parser.add_argument(
"--decoder-glu", type=utils.eval_bool, help="glu after in proj"
)
parser.add_argument(
"--encoder-conv-type",
default="dynamic",
type=str,
choices=["dynamic", "lightweight"],
help="type of convolution",
)
parser.add_argument(
"--decoder-conv-type",
default="dynamic",
type=str,
choices=["dynamic", "lightweight"],
help="type of convolution",
)
parser.add_argument("--weight-softmax", default=True, type=utils.eval_bool)
parser.add_argument(
"--weight-dropout",
type=float,
metavar="D",
help="dropout probability for conv weights",
)
@classmethod
def build_model(cls, args, task):
"""Build a new model instance."""
# make sure all arguments are present in older models
base_architecture(args)
if not hasattr(args, "max_source_positions"):
args.max_source_positions = 1024
if not hasattr(args, "max_target_positions"):
args.max_target_positions = 1024
src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
def build_embedding(dictionary, embed_dim, path=None):
num_embeddings = len(dictionary)
padding_idx = dictionary.pad()
emb = Embedding(num_embeddings, embed_dim, padding_idx)
# if provided, load from preloaded dictionaries
if path:
embed_dict = utils.parse_embedding(path)
utils.load_embedding(embed_dict, dictionary, emb)
return emb
if args.share_all_embeddings:
if src_dict != tgt_dict:
raise RuntimeError(
"--share-all-embeddings requires a joined dictionary"
)
if args.encoder_embed_dim != args.decoder_embed_dim:
raise RuntimeError(
"--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim"
)
if args.decoder_embed_path and (
args.decoder_embed_path != args.encoder_embed_path
):
raise RuntimeError(
"--share-all-embeddings not compatible with --decoder-embed-path"
)
encoder_embed_tokens = build_embedding(
src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = encoder_embed_tokens
args.share_decoder_input_output_embed = True
else:
encoder_embed_tokens = build_embedding(
src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = build_embedding(
tgt_dict, args.decoder_embed_dim, args.decoder_embed_path
)
encoder = LightConvEncoder(args, src_dict, encoder_embed_tokens)
decoder = LightConvDecoder(args, tgt_dict, decoder_embed_tokens)
return LightConvModel(encoder, decoder)
class LightConvEncoder(FairseqEncoder):
"""
LightConv encoder consisting of *args.encoder_layers* layers. Each layer
is a :class:`LightConvEncoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): encoding dictionary
embed_tokens (torch.nn.Embedding): input embedding
"""
def __init__(self, args, dictionary, embed_tokens):
super().__init__(dictionary)
self.dropout_module = FairseqDropout(
args.dropout, module_name=self.__class__.__name__
)
embed_dim = embed_tokens.embedding_dim
self.padding_idx = embed_tokens.padding_idx
self.max_source_positions = args.max_source_positions
self.embed_tokens = embed_tokens
self.embed_scale = math.sqrt(embed_dim)
self.embed_positions = (
PositionalEmbedding(
args.max_source_positions,
embed_dim,
self.padding_idx,
learned=args.encoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
self.layers = nn.ModuleList([])
self.layers.extend(
[
LightConvEncoderLayer(
args, kernel_size=args.encoder_kernel_size_list[i]
)
for i in range(args.encoder_layers)
]
)
self.register_buffer("version", torch.Tensor([2]))
self.normalize = args.encoder_normalize_before
if self.normalize:
self.layer_norm = LayerNorm(embed_dim)
def forward(self, src_tokens, **unused):
"""
Args:
src_tokens (LongTensor): tokens in the source language of shape
`(batch, src_len)`
Returns:
dict:
- **encoder_out** (Tensor): the last encoder layer's output of
shape `(src_len, batch, embed_dim)`
- **encoder_padding_mask** (ByteTensor): the positions of
padding elements of shape `(batch, src_len)`
"""
# embed tokens and positions
x = self.embed_scale * self.embed_tokens(src_tokens)
if self.embed_positions is not None:
x += self.embed_positions(src_tokens)
x = self.dropout_module(x)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
# compute padding mask
encoder_padding_mask = src_tokens.eq(self.padding_idx)
if not encoder_padding_mask.any():
encoder_padding_mask = None
# encoder layers
for layer in self.layers:
x = layer(x, encoder_padding_mask)
if self.normalize:
x = self.layer_norm(x)
return {
"encoder_out": x, # T x B x C
"encoder_padding_mask": encoder_padding_mask, # B x T
}
def reorder_encoder_out(self, encoder_out, new_order):
"""
Reorder encoder output according to *new_order*.
Args:
encoder_out: output from the ``forward()`` method
new_order (LongTensor): desired order
Returns:
*encoder_out* rearranged according to *new_order*
"""
if encoder_out["encoder_out"] is not None:
encoder_out["encoder_out"] = encoder_out["encoder_out"].index_select(
1, new_order
)
if encoder_out["encoder_padding_mask"] is not None:
encoder_out["encoder_padding_mask"] = encoder_out[
"encoder_padding_mask"
].index_select(0, new_order)
return encoder_out
def max_positions(self):
"""Maximum input length supported by the encoder."""
if self.embed_positions is None:
return self.max_source_positions
return min(self.max_source_positions, self.embed_positions.max_positions)
class LightConvDecoder(FairseqIncrementalDecoder):
"""
LightConv decoder consisting of *args.decoder_layers* layers. Each layer
is a :class:`LightConvDecoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): decoding dictionary
embed_tokens (torch.nn.Embedding): output embedding
        no_encoder_attn (bool, optional): if ``True``, do not attend to encoder
            outputs. Default: ``False``
"""
def __init__(
self, args, dictionary, embed_tokens, no_encoder_attn=False, final_norm=True
):
super().__init__(dictionary)
self.dropout_module = FairseqDropout(
args.dropout, module_name=self.__class__.__name__
)
self.share_input_output_embed = args.share_decoder_input_output_embed
input_embed_dim = embed_tokens.embedding_dim
embed_dim = args.decoder_embed_dim
output_embed_dim = args.decoder_output_dim
padding_idx = embed_tokens.padding_idx
self.max_target_positions = args.max_target_positions
self.embed_tokens = embed_tokens
self.embed_scale = math.sqrt(embed_dim) # todo: try with input_embed_dim
self.project_in_dim = (
Linear(input_embed_dim, embed_dim, bias=False)
if embed_dim != input_embed_dim
else None
)
self.embed_positions = (
PositionalEmbedding(
args.max_target_positions,
embed_dim,
padding_idx,
learned=args.decoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
self.layers = nn.ModuleList([])
self.layers.extend(
[
LightConvDecoderLayer(
args, no_encoder_attn, kernel_size=args.decoder_kernel_size_list[i]
)
for i in range(args.decoder_layers)
]
)
self.adaptive_softmax = None
self.project_out_dim = (
Linear(embed_dim, output_embed_dim, bias=False)
if embed_dim != output_embed_dim and not args.tie_adaptive_weights
else None
)
if args.adaptive_softmax_cutoff is not None:
self.adaptive_softmax = AdaptiveSoftmax(
len(dictionary),
output_embed_dim,
utils.eval_str_list(args.adaptive_softmax_cutoff, type=int),
dropout=args.adaptive_softmax_dropout,
adaptive_inputs=embed_tokens if args.tie_adaptive_weights else None,
factor=args.adaptive_softmax_factor,
tie_proj=args.tie_adaptive_proj,
)
elif not self.share_input_output_embed:
self.embed_out = nn.Parameter(
torch.Tensor(len(dictionary), output_embed_dim)
)
nn.init.normal_(self.embed_out, mean=0, std=output_embed_dim ** -0.5)
self.register_buffer("version", torch.Tensor([2]))
self.normalize = args.decoder_normalize_before and final_norm
if self.normalize:
self.layer_norm = LayerNorm(embed_dim)
def forward(
self, prev_output_tokens, encoder_out=None, incremental_state=None, **kwargs
):
"""
Args:
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for teacher forcing
encoder_out (Tensor, optional): output from the encoder, used for
encoder-side attention
incremental_state (dict): dictionary used for storing state during
:ref:`Incremental decoding`
Returns:
tuple:
- the last decoder layer's output of shape `(batch, tgt_len,
vocab)`
- the last decoder layer's attention weights of shape `(batch,
tgt_len, src_len)`
"""
# embed positions
positions = (
self.embed_positions(
prev_output_tokens,
incremental_state=incremental_state,
)
if self.embed_positions is not None
else None
)
if incremental_state is not None:
prev_output_tokens = prev_output_tokens[:, -1:]
if positions is not None:
positions = positions[:, -1:]
# embed tokens and positions
x = self.embed_scale * self.embed_tokens(prev_output_tokens)
if self.project_in_dim is not None:
x = self.project_in_dim(x)
if positions is not None:
x += positions
x = self.dropout_module(x)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
attn = None
inner_states = [x]
# decoder layers
for layer in self.layers:
x, attn = layer(
x,
encoder_out["encoder_out"] if encoder_out is not None else None,
encoder_out["encoder_padding_mask"]
if encoder_out is not None
else None,
incremental_state,
)
inner_states.append(x)
if self.normalize:
x = self.layer_norm(x)
# T x B x C -> B x T x C
x = x.transpose(0, 1)
if self.project_out_dim is not None:
x = self.project_out_dim(x)
if self.adaptive_softmax is None:
# project back to size of vocabulary
if self.share_input_output_embed:
x = F.linear(x, self.embed_tokens.weight)
else:
x = F.linear(x, self.embed_out)
return x, {"attn": attn, "inner_states": inner_states}
def max_positions(self):
"""Maximum output length supported by the decoder."""
if self.embed_positions is None:
return self.max_target_positions
return min(self.max_target_positions, self.embed_positions.max_positions)
def buffered_future_mask(self, tensor):
dim = tensor.size(0)
if (
not hasattr(self, "_future_mask")
or self._future_mask is None
or self._future_mask.device != tensor.device
):
self._future_mask = torch.triu(
utils.fill_with_neg_inf(tensor.new(dim, dim)), 1
)
if self._future_mask.size(0) < dim:
self._future_mask = torch.triu(
utils.fill_with_neg_inf(self._future_mask.resize_(dim, dim)), 1
)
return self._future_mask[:dim, :dim]
class LightConvEncoderLayer(nn.Module):
"""Encoder layer block.
Args:
args (argparse.Namespace): parsed command-line arguments
kernel_size: kernel size of the convolution
"""
def __init__(self, args, kernel_size=0):
super().__init__()
self.embed_dim = args.encoder_embed_dim
self.conv_dim = args.encoder_conv_dim
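        # Choose padding so the convolution preserves sequence length: odd
        # kernels pad symmetrically, even kernels split the padding unevenly
        # across the two sides.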
padding_l = (
kernel_size // 2
if kernel_size % 2 == 1
else ((kernel_size - 1) // 2, kernel_size // 2)
)
if args.encoder_glu:
self.linear1 = Linear(self.embed_dim, 2 * self.conv_dim)
self.act = nn.GLU()
else:
self.linear1 = Linear(self.embed_dim, self.conv_dim)
self.act = None
if args.encoder_conv_type == "lightweight":
self.conv = LightweightConv(
self.conv_dim,
kernel_size,
padding_l=padding_l,
weight_softmax=args.weight_softmax,
num_heads=args.encoder_attention_heads,
weight_dropout=args.weight_dropout,
)
elif args.encoder_conv_type == "dynamic":
self.conv = DynamicConv(
self.conv_dim,
kernel_size,
padding_l=padding_l,
weight_softmax=args.weight_softmax,
num_heads=args.encoder_attention_heads,
weight_dropout=args.weight_dropout,
)
else:
raise NotImplementedError
self.linear2 = Linear(self.conv_dim, self.embed_dim)
self.dropout_module = FairseqDropout(
args.dropout, module_name=self.__class__.__name__
)
self.relu_dropout_module = FairseqDropout(
args.relu_dropout, module_name=self.__class__.__name__
)
self.input_dropout_module = FairseqDropout(
args.input_dropout, module_name=self.__class__.__name__
)
self.normalize_before = args.encoder_normalize_before
self.fc1 = Linear(self.embed_dim, args.encoder_ffn_embed_dim)
self.fc2 = Linear(args.encoder_ffn_embed_dim, self.embed_dim)
self.layer_norms = nn.ModuleList([LayerNorm(self.embed_dim) for _ in range(2)])
def forward(self, x, encoder_padding_mask):
"""
Args:
x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
encoder_padding_mask (ByteTensor): binary ByteTensor of shape
`(batch, src_len)` where padding elements are indicated by ``1``.
Returns:
            encoded output of shape `(seq_len, batch, embed_dim)`
"""
residual = x
x = self.maybe_layer_norm(0, x, before=True)
x = self.input_dropout_module(x)
x = self.linear1(x)
if self.act is not None:
x = self.act(x)
if encoder_padding_mask is not None:
x = x.masked_fill(encoder_padding_mask.transpose(0, 1).unsqueeze(2), 0)
x = self.conv(x)
x = self.linear2(x)
x = self.dropout_module(x)
x = residual + x
x = self.maybe_layer_norm(0, x, after=True)
residual = x
x = self.maybe_layer_norm(1, x, before=True)
x = F.relu(self.fc1(x))
x = self.relu_dropout_module(x)
x = self.fc2(x)
x = self.dropout_module(x)
x = residual + x
x = self.maybe_layer_norm(1, x, after=True)
return x
def maybe_layer_norm(self, i, x, before=False, after=False):
assert before ^ after
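        # Exactly one of the two call sites applies LayerNorm: the `before`
        # call when normalize_before=True (pre-norm), the `after` call
        # otherwise (post-norm).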
if after ^ self.normalize_before:
return self.layer_norms[i](x)
else:
return x
def extra_repr(self):
return (
"dropout={}, relu_dropout={}, input_dropout={}, normalize_before={}".format(
self.dropout_module.p,
self.relu_dropout_module.p,
self.input_dropout_module.p,
self.normalize_before,
)
)
class LightConvDecoderLayer(nn.Module):
"""Decoder layer block.
Args:
args (argparse.Namespace): parsed command-line arguments
        no_encoder_attn (bool, optional): if ``True``, do not attend to encoder
            outputs. Default: ``False``
kernel_size: kernel size of the convolution
"""
def __init__(self, args, no_encoder_attn=False, kernel_size=0):
super().__init__()
self.embed_dim = args.decoder_embed_dim
self.conv_dim = args.decoder_conv_dim
if args.decoder_glu:
self.linear1 = Linear(self.embed_dim, 2 * self.conv_dim)
self.act = nn.GLU()
else:
self.linear1 = Linear(self.embed_dim, self.conv_dim)
self.act = None
if args.decoder_conv_type == "lightweight":
self.conv = LightweightConv(
self.conv_dim,
kernel_size,
padding_l=kernel_size - 1,
weight_softmax=args.weight_softmax,
num_heads=args.decoder_attention_heads,
weight_dropout=args.weight_dropout,
)
elif args.decoder_conv_type == "dynamic":
self.conv = DynamicConv(
self.conv_dim,
kernel_size,
padding_l=kernel_size - 1,
weight_softmax=args.weight_softmax,
num_heads=args.decoder_attention_heads,
weight_dropout=args.weight_dropout,
)
else:
raise NotImplementedError
self.linear2 = Linear(self.conv_dim, self.embed_dim)
self.dropout_module = FairseqDropout(
args.dropout, module_name=self.__class__.__name__
)
self.relu_dropout_module = FairseqDropout(
args.relu_dropout, module_name=self.__class__.__name__
)
self.input_dropout_module = FairseqDropout(
args.input_dropout, module_name=self.__class__.__name__
)
self.normalize_before = args.decoder_normalize_before
self.conv_layer_norm = LayerNorm(self.embed_dim)
if no_encoder_attn:
self.encoder_attn = None
self.encoder_attn_layer_norm = None
else:
self.encoder_attn = MultiheadAttention(
self.embed_dim,
args.decoder_attention_heads,
dropout=args.attention_dropout,
encoder_decoder_attention=True,
)
self.encoder_attn_layer_norm = LayerNorm(self.embed_dim)
self.fc1 = Linear(self.embed_dim, args.decoder_ffn_embed_dim)
self.fc2 = Linear(args.decoder_ffn_embed_dim, self.embed_dim)
self.final_layer_norm = LayerNorm(self.embed_dim)
self.need_attn = True
def forward(
self,
x,
encoder_out,
encoder_padding_mask,
incremental_state,
prev_conv_state=None,
prev_attn_state=None,
conv_mask=None,
conv_padding_mask=None,
):
"""
Args:
x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
encoder_padding_mask (ByteTensor): binary ByteTensor of shape
`(batch, src_len)` where padding elements are indicated by ``1``.
Returns:
            encoded output of shape `(seq_len, batch, embed_dim)`
"""
residual = x
x = self.maybe_layer_norm(self.conv_layer_norm, x, before=True)
if prev_conv_state is not None:
if incremental_state is None:
incremental_state = {}
self.conv._set_input_buffer(incremental_state, prev_conv_state)
x = self.input_dropout_module(x)
x = self.linear1(x)
if self.act is not None:
x = self.act(x)
x = self.conv(x, incremental_state=incremental_state)
x = self.linear2(x)
x = self.dropout_module(x)
x = residual + x
x = self.maybe_layer_norm(self.conv_layer_norm, x, after=True)
attn = None
if self.encoder_attn is not None:
residual = x
x = self.maybe_layer_norm(self.encoder_attn_layer_norm, x, before=True)
if prev_attn_state is not None:
if incremental_state is None:
incremental_state = {}
prev_key, prev_value = prev_attn_state
saved_state = {"prev_key": prev_key, "prev_value": prev_value}
self.encoder_attn._set_input_buffer(incremental_state, saved_state)
x, attn = self.encoder_attn(
query=x,
key=encoder_out,
value=encoder_out,
key_padding_mask=encoder_padding_mask,
incremental_state=incremental_state,
static_kv=True,
need_weights=(not self.training and self.need_attn),
)
x = self.dropout_module(x)
x = residual + x
x = self.maybe_layer_norm(self.encoder_attn_layer_norm, x, after=True)
residual = x
x = self.maybe_layer_norm(self.final_layer_norm, x, before=True)
x = F.relu(self.fc1(x))
x = self.relu_dropout_module(x)
x = self.fc2(x)
x = self.dropout_module(x)
x = residual + x
x = self.maybe_layer_norm(self.final_layer_norm, x, after=True)
return x, attn
def maybe_layer_norm(self, layer_norm, x, before=False, after=False):
assert before ^ after
if after ^ self.normalize_before:
return layer_norm(x)
else:
return x
def make_generation_fast_(self, need_attn=False, **kwargs):
self.need_attn = need_attn
def extra_repr(self):
return (
"dropout={}, relu_dropout={}, input_dropout={}, normalize_before={}".format(
self.dropout_module.p,
self.relu_dropout_module.p,
self.input_dropout_module.p,
self.normalize_before,
)
)
def Embedding(num_embeddings, embedding_dim, padding_idx):
m = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5)
nn.init.constant_(m.weight[padding_idx], 0)
return m
def Linear(in_features, out_features, bias=True):
m = nn.Linear(in_features, out_features, bias)
nn.init.xavier_uniform_(m.weight)
if bias:
nn.init.constant_(m.bias, 0.0)
return m
@register_model_architecture("lightconv", "lightconv")
def base_architecture(args):
args.encoder_embed_path = getattr(args, "encoder_embed_path", None)
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 2048)
args.encoder_layers = getattr(args, "encoder_layers", 7)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 8)
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", False)
args.encoder_learned_pos = getattr(args, "encoder_learned_pos", False)
args.decoder_embed_path = getattr(args, "decoder_embed_path", None)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
args.decoder_ffn_embed_dim = getattr(
args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 8)
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", False)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.attention_dropout = getattr(args, "attention_dropout", 0.0)
args.relu_dropout = getattr(args, "relu_dropout", 0.0)
args.dropout = getattr(args, "dropout", 0.1)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
args.adaptive_softmax_dropout = getattr(args, "adaptive_softmax_dropout", 0)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", False
)
args.share_all_embeddings = getattr(args, "share_all_embeddings", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
args.encoder_conv_dim = getattr(args, "encoder_conv_dim", args.encoder_embed_dim)
args.decoder_conv_dim = getattr(args, "decoder_conv_dim", args.decoder_embed_dim)
args.encoder_kernel_size_list = getattr(
args, "encoder_kernel_size_list", [3, 7, 15, 31, 31, 31, 31]
)
args.decoder_kernel_size_list = getattr(
args, "decoder_kernel_size_list", [3, 7, 15, 31, 31, 31]
)
if len(args.encoder_kernel_size_list) == 1:
args.encoder_kernel_size_list = (
args.encoder_kernel_size_list * args.encoder_layers
)
if len(args.decoder_kernel_size_list) == 1:
args.decoder_kernel_size_list = (
args.decoder_kernel_size_list * args.decoder_layers
)
assert (
len(args.encoder_kernel_size_list) == args.encoder_layers
), "encoder_kernel_size_list doesn't match encoder_layers"
assert (
len(args.decoder_kernel_size_list) == args.decoder_layers
), "decoder_kernel_size_list doesn't match decoder_layers"
args.encoder_glu = getattr(args, "encoder_glu", True)
args.decoder_glu = getattr(args, "decoder_glu", True)
args.input_dropout = getattr(args, "input_dropout", 0.1)
args.weight_dropout = getattr(args, "weight_dropout", args.attention_dropout)
@register_model_architecture("lightconv", "lightconv_iwslt_de_en")
def lightconv_iwslt_de_en(args):
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 1024)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 4)
args.encoder_layers = getattr(args, "encoder_layers", 7)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 512)
args.decoder_ffn_embed_dim = getattr(args, "decoder_ffn_embed_dim", 1024)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 4)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.attention_dropout = getattr(args, "attention_dropout", 0.1)
args.weight_dropout = getattr(args, "weight_dropout", 0.1)
args.encoder_glu = getattr(args, "encoder_glu", False)
args.decoder_glu = getattr(args, "decoder_glu", False)
args.input_dropout = getattr(args, "input_dropout", 0.0)
base_architecture(args)
@register_model_architecture("lightconv", "lightconv_wmt_en_de")
def lightconv_wmt_en_de(args):
base_architecture(args)
@register_model_architecture("lightconv", "lightconv_wmt_en_de_big")
def lightconv_wmt_en_de_big(args):
args.attention_dropout = getattr(args, "attention_dropout", 0.1)
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 1024)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 4096)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 16)
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", False)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 1024)
args.decoder_ffn_embed_dim = getattr(args, "decoder_ffn_embed_dim", 4096)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 16)
args.dropout = getattr(args, "dropout", 0.3)
base_architecture(args)
@register_model_architecture("lightconv", "lightconv_wmt_en_fr_big")
def lightconv_wmt_en_fr_big(args):
args.dropout = getattr(args, "dropout", 0.1)
lightconv_wmt_en_de_big(args)
@register_model_architecture("lightconv", "lightconv_wmt_zh_en_big")
def lightconv_wmt_zh_en_big(args):
args.dropout = getattr(args, "dropout", 0.2)
args.attention_dropout = getattr(args, "attention_dropout", 0.2)
args.weight_dropout = getattr(args, "weight_dropout", 0.2)
lightconv_wmt_en_de_big(args)
COCO-LM/fairseq/fairseq/models/lightconv.py/0
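A minimal, hedged sketch of driving one of the convolution modules above directly, mirroring the keyword arguments used in the encoder layer; shapes follow the T x B x C convention and the hyperparameters are arbitrary:

import torch
from fairseq.modules import DynamicConv

conv = DynamicConv(
    64,                     # conv_dim
    kernel_size=7,
    padding_l=3,            # odd kernel -> symmetric padding
    weight_softmax=True,
    num_heads=8,
    weight_dropout=0.1,
)
x = torch.randn(20, 2, 64)  # T x B x C
y = conv(x)
assert y.shape == x.shape   # sequence length is preserved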
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.iterative_refinement_generator import DecoderOut
from fairseq.models import register_model, register_model_architecture
from fairseq.models.nat import FairseqNATDecoder, FairseqNATModel, ensemble_decoder
from fairseq.models.transformer import Embedding
from fairseq.modules.transformer_sentence_encoder import init_bert_params
def _mean_pooling(enc_feats, src_masks):
# enc_feats: T x B x C
# src_masks: B x T or None
if src_masks is None:
enc_feats = enc_feats.mean(0)
else:
src_masks = (~src_masks).transpose(0, 1).type_as(enc_feats)
enc_feats = (
(enc_feats / src_masks.sum(0)[None, :, None]) * src_masks[:, :, None]
).sum(0)
return enc_feats
def _argmax(x, dim):
return (x == x.max(dim, keepdim=True)[0]).type_as(x)
def _uniform_assignment(src_lens, trg_lens):
max_trg_len = trg_lens.max()
steps = (src_lens.float() - 1) / (trg_lens.float() - 1) # step-size
# max_trg_len
index_t = utils.new_arange(trg_lens, max_trg_len).float()
index_t = steps[:, None] * index_t[None, :] # batch_size X max_trg_len
index_t = torch.round(index_t).long().detach()
return index_t
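# Worked example (illustrative, not fairseq code): copying a 5-token source
# onto a 3-token target gives step size (5 - 1) / (3 - 1) = 2, i.e. the target
# positions read source positions [0, 2, 4].
def _demo_uniform_assignment():
    index_t = _uniform_assignment(torch.LongTensor([5]), torch.LongTensor([3]))
    assert index_t.tolist() == [[0, 2, 4]]
    return index_t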
@register_model("nonautoregressive_transformer")
class NATransformerModel(FairseqNATModel):
@property
def allow_length_beam(self):
return True
@staticmethod
def add_args(parser):
FairseqNATModel.add_args(parser)
# length prediction
parser.add_argument(
"--src-embedding-copy",
action="store_true",
help="copy encoder word embeddings as the initial input of the decoder",
)
parser.add_argument(
"--pred-length-offset",
action="store_true",
help="predicting the length difference between the target and source sentences",
)
parser.add_argument(
"--sg-length-pred",
action="store_true",
help="stop the gradients back-propagated from the length predictor",
)
parser.add_argument(
"--length-loss-factor",
type=float,
help="weights on the length prediction loss",
)
@classmethod
def build_decoder(cls, args, tgt_dict, embed_tokens):
decoder = NATransformerDecoder(args, tgt_dict, embed_tokens)
if getattr(args, "apply_bert_init", False):
decoder.apply(init_bert_params)
return decoder
def forward(
self, src_tokens, src_lengths, prev_output_tokens, tgt_tokens, **kwargs
):
# encoding
encoder_out = self.encoder(src_tokens, src_lengths=src_lengths, **kwargs)
# length prediction
length_out = self.decoder.forward_length(
normalize=False, encoder_out=encoder_out
)
length_tgt = self.decoder.forward_length_prediction(
length_out, encoder_out, tgt_tokens
)
# decoding
word_ins_out = self.decoder(
normalize=False,
prev_output_tokens=prev_output_tokens,
encoder_out=encoder_out,
)
return {
"word_ins": {
"out": word_ins_out,
"tgt": tgt_tokens,
"mask": tgt_tokens.ne(self.pad),
"ls": self.args.label_smoothing,
"nll_loss": True,
},
"length": {
"out": length_out,
"tgt": length_tgt,
"factor": self.decoder.length_loss_factor,
},
}
def forward_decoder(self, decoder_out, encoder_out, decoding_format=None, **kwargs):
step = decoder_out.step
output_tokens = decoder_out.output_tokens
output_scores = decoder_out.output_scores
history = decoder_out.history
# execute the decoder
output_masks = output_tokens.ne(self.pad)
_scores, _tokens = self.decoder(
normalize=True,
prev_output_tokens=output_tokens,
encoder_out=encoder_out,
step=step,
).max(-1)
output_tokens.masked_scatter_(output_masks, _tokens[output_masks])
output_scores.masked_scatter_(output_masks, _scores[output_masks])
if history is not None:
history.append(output_tokens.clone())
return decoder_out._replace(
output_tokens=output_tokens,
output_scores=output_scores,
attn=None,
history=history,
)
def initialize_output_tokens(self, encoder_out, src_tokens):
# length prediction
length_tgt = self.decoder.forward_length_prediction(
self.decoder.forward_length(normalize=True, encoder_out=encoder_out),
encoder_out=encoder_out,
)
max_length = length_tgt.clamp_(min=2).max()
idx_length = utils.new_arange(src_tokens, max_length)
initial_output_tokens = src_tokens.new_zeros(
src_tokens.size(0), max_length
).fill_(self.pad)
initial_output_tokens.masked_fill_(
idx_length[None, :] < length_tgt[:, None], self.unk
)
initial_output_tokens[:, 0] = self.bos
initial_output_tokens.scatter_(1, length_tgt[:, None] - 1, self.eos)
initial_output_scores = initial_output_tokens.new_zeros(
*initial_output_tokens.size()
).type_as(encoder_out["encoder_out"][0])
return DecoderOut(
output_tokens=initial_output_tokens,
output_scores=initial_output_scores,
attn=None,
step=0,
max_step=0,
history=None,
)
def regenerate_length_beam(self, decoder_out, beam_size):
output_tokens = decoder_out.output_tokens
length_tgt = output_tokens.ne(self.pad).sum(1)
length_tgt = (
length_tgt[:, None]
+ utils.new_arange(length_tgt, 1, beam_size)
- beam_size // 2
)
length_tgt = length_tgt.view(-1).clamp_(min=2)
max_length = length_tgt.max()
idx_length = utils.new_arange(length_tgt, max_length)
initial_output_tokens = output_tokens.new_zeros(
length_tgt.size(0), max_length
).fill_(self.pad)
initial_output_tokens.masked_fill_(
idx_length[None, :] < length_tgt[:, None], self.unk
)
initial_output_tokens[:, 0] = self.bos
initial_output_tokens.scatter_(1, length_tgt[:, None] - 1, self.eos)
initial_output_scores = initial_output_tokens.new_zeros(
*initial_output_tokens.size()
).type_as(decoder_out.output_scores)
return decoder_out._replace(
output_tokens=initial_output_tokens, output_scores=initial_output_scores
)
class NATransformerDecoder(FairseqNATDecoder):
def __init__(self, args, dictionary, embed_tokens, no_encoder_attn=False):
super().__init__(
args, dictionary, embed_tokens, no_encoder_attn=no_encoder_attn
)
self.dictionary = dictionary
self.bos = dictionary.bos()
self.unk = dictionary.unk()
self.eos = dictionary.eos()
self.encoder_embed_dim = args.encoder_embed_dim
self.sg_length_pred = getattr(args, "sg_length_pred", False)
self.pred_length_offset = getattr(args, "pred_length_offset", False)
self.length_loss_factor = getattr(args, "length_loss_factor", 0.1)
self.src_embedding_copy = getattr(args, "src_embedding_copy", False)
self.embed_length = Embedding(256, self.encoder_embed_dim, None)
@ensemble_decoder
def forward(self, normalize, encoder_out, prev_output_tokens, step=0, **unused):
features, _ = self.extract_features(
prev_output_tokens,
encoder_out=encoder_out,
embedding_copy=(step == 0) & self.src_embedding_copy,
)
decoder_out = self.output_layer(features)
return F.log_softmax(decoder_out, -1) if normalize else decoder_out
@ensemble_decoder
def forward_length(self, normalize, encoder_out):
enc_feats = encoder_out["encoder_out"][0] # T x B x C
if len(encoder_out["encoder_padding_mask"]) > 0:
src_masks = encoder_out["encoder_padding_mask"][0] # B x T
else:
src_masks = None
enc_feats = _mean_pooling(enc_feats, src_masks)
if self.sg_length_pred:
enc_feats = enc_feats.detach()
length_out = F.linear(enc_feats, self.embed_length.weight)
return F.log_softmax(length_out, -1) if normalize else length_out
def extract_features(
self,
prev_output_tokens,
encoder_out=None,
early_exit=None,
embedding_copy=False,
**unused
):
"""
Similar to *forward* but only return features.
Inputs:
prev_output_tokens: Tensor(B, T)
encoder_out: a dictionary of hidden states and masks
Returns:
tuple:
- the decoder's features of shape `(batch, tgt_len, embed_dim)`
- a dictionary with any model-specific outputs
        the decoder has full attention over all generated tokens (no causal mask)
"""
# embedding
if embedding_copy:
src_embd = encoder_out["encoder_embedding"][0]
if len(encoder_out["encoder_padding_mask"]) > 0:
src_mask = encoder_out["encoder_padding_mask"][0]
else:
src_mask = None
src_mask = (
~src_mask
if src_mask is not None
else prev_output_tokens.new_ones(*src_embd.size()[:2]).bool()
)
x, decoder_padding_mask = self.forward_embedding(
prev_output_tokens,
self.forward_copying_source(
src_embd, src_mask, prev_output_tokens.ne(self.padding_idx)
),
)
else:
x, decoder_padding_mask = self.forward_embedding(prev_output_tokens)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
attn = None
inner_states = [x]
# decoder layers
for i, layer in enumerate(self.layers):
# early exit from the decoder.
if (early_exit is not None) and (i >= early_exit):
break
x, attn, _ = layer(
x,
encoder_out["encoder_out"][0]
if (encoder_out is not None and len(encoder_out["encoder_out"]) > 0)
else None,
encoder_out["encoder_padding_mask"][0]
if (
encoder_out is not None
and len(encoder_out["encoder_padding_mask"]) > 0
)
else None,
self_attn_mask=None,
self_attn_padding_mask=decoder_padding_mask,
)
inner_states.append(x)
if self.layer_norm:
x = self.layer_norm(x)
# T x B x C -> B x T x C
x = x.transpose(0, 1)
if self.project_out_dim is not None:
x = self.project_out_dim(x)
return x, {"attn": attn, "inner_states": inner_states}
def forward_embedding(self, prev_output_tokens, states=None):
# embed positions
positions = (
self.embed_positions(prev_output_tokens)
if self.embed_positions is not None
else None
)
# embed tokens and positions
if states is None:
x = self.embed_scale * self.embed_tokens(prev_output_tokens)
if self.project_in_dim is not None:
x = self.project_in_dim(x)
else:
x = states
if positions is not None:
x += positions
x = self.dropout_module(x)
decoder_padding_mask = prev_output_tokens.eq(self.padding_idx)
return x, decoder_padding_mask
def forward_copying_source(self, src_embeds, src_masks, tgt_masks):
length_sources = src_masks.sum(1)
length_targets = tgt_masks.sum(1)
mapped_inputs = _uniform_assignment(length_sources, length_targets).masked_fill(
~tgt_masks, 0
)
copied_embedding = torch.gather(
src_embeds,
1,
mapped_inputs.unsqueeze(-1).expand(
*mapped_inputs.size(), src_embeds.size(-1)
),
)
return copied_embedding
def forward_length_prediction(self, length_out, encoder_out, tgt_tokens=None):
enc_feats = encoder_out["encoder_out"][0] # T x B x C
if len(encoder_out["encoder_padding_mask"]) > 0:
src_masks = encoder_out["encoder_padding_mask"][0] # B x T
else:
src_masks = None
if self.pred_length_offset:
if src_masks is None:
src_lengs = enc_feats.new_ones(enc_feats.size(1)).fill_(
enc_feats.size(0)
)
else:
src_lengs = (~src_masks).transpose(0, 1).type_as(enc_feats).sum(0)
src_lengs = src_lengs.long()
if tgt_tokens is not None:
# obtain the length target
tgt_lengs = tgt_tokens.ne(self.padding_idx).sum(1).long()
if self.pred_length_offset:
length_tgt = tgt_lengs - src_lengs + 128
else:
length_tgt = tgt_lengs
length_tgt = length_tgt.clamp(min=0, max=255)
else:
# predict the length target (greedy for now)
# TODO: implementing length-beam
pred_lengs = length_out.max(-1)[1]
if self.pred_length_offset:
length_tgt = pred_lengs - 128 + src_lengs
else:
length_tgt = pred_lengs
return length_tgt
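# Worked example of the offset encoding above (illustrative): with
# pred_length_offset=True, a 6-token source and an 8-token target train the
# predictor on bucket 8 - 6 + 128 = 130; at inference, a predicted bucket of
# 130 decodes back to length 130 - 128 + 6 = 8. Buckets are clamped to
# [0, 255] to fit the 256-entry embed_length table.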
@register_model_architecture(
"nonautoregressive_transformer", "nonautoregressive_transformer"
)
def base_architecture(args):
args.encoder_embed_path = getattr(args, "encoder_embed_path", None)
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 2048)
args.encoder_layers = getattr(args, "encoder_layers", 6)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 8)
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", False)
args.encoder_learned_pos = getattr(args, "encoder_learned_pos", False)
args.decoder_embed_path = getattr(args, "decoder_embed_path", None)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
args.decoder_ffn_embed_dim = getattr(
args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 8)
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", False)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.attention_dropout = getattr(args, "attention_dropout", 0.0)
args.activation_dropout = getattr(args, "activation_dropout", 0.0)
args.activation_fn = getattr(args, "activation_fn", "relu")
args.dropout = getattr(args, "dropout", 0.1)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
args.adaptive_softmax_dropout = getattr(args, "adaptive_softmax_dropout", 0)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", False
)
args.share_all_embeddings = getattr(args, "share_all_embeddings", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.adaptive_input = getattr(args, "adaptive_input", False)
args.apply_bert_init = getattr(args, "apply_bert_init", False)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
# --- special arguments ---
args.sg_length_pred = getattr(args, "sg_length_pred", False)
args.pred_length_offset = getattr(args, "pred_length_offset", False)
args.length_loss_factor = getattr(args, "length_loss_factor", 0.1)
args.src_embedding_copy = getattr(args, "src_embedding_copy", False)
@register_model_architecture(
"nonautoregressive_transformer", "nonautoregressive_transformer_wmt_en_de"
)
def nonautoregressive_transformer_wmt_en_de(args):
base_architecture(args)
COCO-LM/fairseq/fairseq/models/nat/nonautoregressive_transformer.py/0
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.modules import (
LayerNorm,
)
class PoolerLogits(nn.Module):
"""
Compute SQuAD start logits from sequence hidden states.
Args:
config (:class:`~transformers.PretrainedConfig`):
The config used by the model, will be used to grab the :obj:`hidden_size` of the model.
"""
def __init__(self, hidden_size):
super().__init__()
self.dense = nn.Linear(hidden_size, 1)
self.dense.weight.data.normal_(mean=0.0, std=0.02)
self.dense.bias.data.zero_()
def forward(
self, hidden_states, p_mask = None
):
"""
Args:
hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, seq_len, hidden_size)`):
The final hidden states of the model.
p_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, seq_len)`, `optional`):
                Mask for tokens at invalid positions, such as query and special symbols (PAD, SEP, CLS). 1.0 means the token
                should be masked.
Returns:
:obj:`torch.FloatTensor`: The start logits for SQuAD.
"""
x = self.dense(hidden_states).squeeze(-1)
if p_mask is not None:
x.masked_fill_(p_mask, float('-inf'))
return x
class SQuADHead(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.start_logits = PoolerLogits(hidden_size)
self.end_logits = PoolerLogits(hidden_size)
def forward(
self,
hidden_states,
start_positions=None,
end_positions=None,
p_mask = None,
):
"""
Args:
hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, seq_len, hidden_size)`):
Final hidden states of the model on the sequence tokens.
start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Positions of the first token for the labeled span.
end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Positions of the last token for the labeled span.
            p_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, seq_len)`, `optional`):
                Mask for tokens at invalid positions, such as query and special symbols (PAD, SEP, CLS). 1.0 means the token
                should be masked.
Returns:
"""
start_logits = self.start_logits(hidden_states, p_mask=p_mask)
end_logits = self.end_logits(hidden_states, p_mask=p_mask)
if start_positions is not None and end_positions is not None:
def loss_fct(logits, targets):
return F.nll_loss(
F.log_softmax(
logits.view(-1, logits.size(-1)),
dim=-1,
dtype=torch.float32,
),
targets.view(-1),
reduction='sum',
)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) * 0.5
return total_loss
else:
return start_logits, end_logits
|
COCO-LM/fairseq/fairseq/models/squad/squad_head.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/models/squad/squad_head.py",
"repo_id": "COCO-LM",
"token_count": 1742
}
| 189 |
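A minimal usage sketch for the head above; the shapes and hidden size are illustrative assumptions, and the class is assumed importable from this module:

import torch
head = SQuADHead(hidden_size=768)
hidden = torch.randn(2, 128, 768)        # (batch, seq_len, hidden)
start = torch.tensor([5, 10])            # gold start positions
end = torch.tensor([7, 12])              # gold end positions
loss = head(hidden, start_positions=start, end_positions=end)  # training
start_logits, end_logits = head(hidden)  # inference returns the raw logits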
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import torch
import torch.nn.functional as F
logger = logging.getLogger(__name__)
def _cross_entropy_pytorch(logits, target, ignore_index=None, reduction="mean"):
lprobs = F.log_softmax(logits, dim=-1, dtype=torch.float32)
return F.nll_loss(
lprobs,
target,
ignore_index=ignore_index,
reduction=reduction,
)
try:
import fused_xentropy_cuda
logger.info("using fused cross entropy")
class SoftmaxCrossEntropyLoss(torch.autograd.Function):
@staticmethod
def forward(ctx, logits, labels, padding_idx=0, half_to_float=False):
losses, max_log_sum_exp = fused_xentropy_cuda.forward(
logits, labels, half_to_float)
if padding_idx >= 0:
losses.masked_fill_(labels==padding_idx, 0)
ctx.save_for_backward(logits, max_log_sum_exp, labels,
torch.LongTensor([padding_idx]))
return losses
@staticmethod
def backward(ctx, grad_loss):
logits, max_log_sum_exp, labels, padding_idx = ctx.saved_tensors
if not grad_loss.is_contiguous():
grad_loss = grad_loss.contiguous()
padding_idx = padding_idx.item()
if padding_idx >= 0:
grad_loss.masked_fill_(labels==padding_idx, 0)
grad_logits = fused_xentropy_cuda.backward(
grad_loss.contiguous(), logits, max_log_sum_exp,
labels)
return grad_logits, None, None, None
def cross_entropy(logits, target, ignore_index=-100, reduction="mean"):
if logits.device == torch.device("cpu"):
return _cross_entropy_pytorch(logits, target, ignore_index, reduction)
else:
half_to_float = (logits.dtype == torch.half) or (logits.dtype == torch.bfloat16)
losses = SoftmaxCrossEntropyLoss.apply(
logits, target, ignore_index, half_to_float,
)
if reduction == "sum":
return losses.sum()
elif reduction == "mean":
if ignore_index >= 0:
return losses.sum() / target.ne(ignore_index).sum()
else:
return losses.mean()
elif reduction == "none":
return losses
else:
raise NotImplementedError
except ImportError:
def cross_entropy(logits, target, ignore_index=-100, reduction="mean"):
return _cross_entropy_pytorch(logits, target, ignore_index, reduction)
|
COCO-LM/fairseq/fairseq/modules/cross_entropy.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/modules/cross_entropy.py",
"repo_id": "COCO-LM",
"token_count": 1287
}
| 190 |
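The wrapper dispatches to the fused CUDA kernel when `fused_xentropy_cuda` is importable and the tensors live on GPU, and falls back to plain PyTorch otherwise, so call sites stay identical. A short sketch (shapes are illustrative; on CPU this always takes the fallback path):

import torch
logits = torch.randn(8, 1000)            # (num_tokens, vocab_size)
target = torch.randint(0, 1000, (8,))
target[0] = 1                            # pretend index 1 is padding
loss = cross_entropy(logits, target, ignore_index=1, reduction="mean")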
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
class GradMultiply(torch.autograd.Function):
@staticmethod
def forward(ctx, x, scale):
ctx.scale = scale
res = x.new(x)
return res
@staticmethod
def backward(ctx, grad):
return grad * ctx.scale, None
|
COCO-LM/fairseq/fairseq/modules/grad_multiply.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/modules/grad_multiply.py",
"repo_id": "COCO-LM",
"token_count": 160
}
| 191 |
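GradMultiply is the identity in the forward pass and scales the gradient in the backward pass, a common trick for damping the gradient that flows into a shared encoder. A minimal check:

import torch
x = torch.ones(3, requires_grad=True)
y = GradMultiply.apply(x, 0.5)   # forward: y equals x
y.sum().backward()
print(x.grad)                    # tensor([0.5000, 0.5000, 0.5000])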
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch.nn as nn
from .learned_positional_embedding import LearnedPositionalEmbedding
from .sinusoidal_positional_embedding import SinusoidalPositionalEmbedding
def PositionalEmbedding(
num_embeddings: int,
embedding_dim: int,
padding_idx: int,
learned: bool = False,
):
if learned:
# if padding_idx is specified then offset the embedding ids by
# this index and adjust num_embeddings appropriately
# TODO: The right place for this offset would be inside
# LearnedPositionalEmbedding. Move this there for a cleaner implementation.
if padding_idx is not None:
num_embeddings = num_embeddings + padding_idx + 1
m = LearnedPositionalEmbedding(num_embeddings, embedding_dim, padding_idx)
nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5)
if padding_idx is not None:
nn.init.constant_(m.weight[padding_idx], 0)
else:
m = SinusoidalPositionalEmbedding(
embedding_dim,
padding_idx,
init_size=num_embeddings + padding_idx + 1,
)
return m
|
COCO-LM/fairseq/fairseq/modules/positional_embedding.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/modules/positional_embedding.py",
"repo_id": "COCO-LM",
"token_count": 509
}
| 192 |
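A short sketch of building both flavors of the helper above (runnable only inside fairseq, since it relies on the two embedding classes imported at the top; note the sinusoidal branch assumes padding_idx is not None):

m_learned = PositionalEmbedding(num_embeddings=512, embedding_dim=64,
                                padding_idx=1, learned=True)
m_sinusoidal = PositionalEmbedding(num_embeddings=512, embedding_dim=64,
                                   padding_idx=1, learned=False)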
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..ops import emulate_int
class IntEmbedding(nn.Module):
"""
Quantized counterpart of the nn.Embedding module that applies QuantNoise during training.
Args:
- num_embeddings: number of tokens
- embedding_dim: embedding dimension
- p: amount of noise to inject (0 = no quantization, 1 = quantize all the weights)
- bits: number of bits
- method: choose among {"tensor", "histogram", "channel"}
- update_step: recompute scale and zero_point every update_steps iterations
Remarks:
- We use the straight-through estimator so that the gradients
back-propagate nicely in the network, this is implemented with
the detach() trick
- Parameters scale and zero_point are recomputed every update_step
forward pass to reduce the overhead
- At test time, the weights are fully quantized
"""
def __init__(
self,
num_embeddings,
embedding_dim,
padding_idx=None,
max_norm=None,
norm_type=2.0,
scale_grad_by_freq=False,
sparse=False,
_weight=None,
p=0,
update_step=1000,
bits=8,
method="histogram",
):
super(IntEmbedding, self).__init__()
self.num_embeddings = num_embeddings
self.embedding_dim = embedding_dim
if padding_idx is not None:
if padding_idx > 0:
assert (
padding_idx < self.num_embeddings
), "Padding_idx must be within num_embeddings"
elif padding_idx < 0:
assert (
padding_idx >= -self.num_embeddings
), "Padding_idx must be within num_embeddings"
padding_idx = self.num_embeddings + padding_idx
self.padding_idx = padding_idx
self.max_norm = max_norm
self.norm_type = norm_type
self.scale_grad_by_freq = scale_grad_by_freq
if _weight is None:
self.weight = nn.Parameter(torch.Tensor(num_embeddings, embedding_dim))
self.reset_parameters()
else:
assert list(_weight.shape) == [
num_embeddings,
embedding_dim,
], "Shape of weight does not match num_embeddings and embedding_dim"
self.weight = nn.Parameter(_weight)
self.sparse = sparse
# quantization parameters
self.p = p
self.bits = bits
self.method = method
self.update_step = update_step
self.counter = 0
def reset_parameters(self):
nn.init.normal_(self.weight)
if self.padding_idx is not None:
with torch.no_grad():
self.weight[self.padding_idx].fill_(0)
def forward(self, input):
# train with QuantNoise and evaluate the fully quantized network
p = self.p if self.training else 1
# update parameters every 1000 iterations
if self.counter % self.update_step == 0:
self.scale = None
self.zero_point = None
self.counter += 1
# quantize weight
weight_quantized, self.scale, self.zero_point = emulate_int(
self.weight.detach(),
bits=self.bits,
method=self.method,
scale=self.scale,
zero_point=self.zero_point,
)
# mask to apply noise
mask = torch.zeros_like(self.weight)
mask.bernoulli_(1 - p)
noise = (weight_quantized - self.weight).masked_fill(mask.bool(), 0)
# using straight-through estimator (STE)
clamp_low = -self.scale * self.zero_point
clamp_high = self.scale * (2 ** self.bits - 1 - self.zero_point)
weight = (
torch.clamp(self.weight, clamp_low.item(), clamp_high.item())
+ noise.detach()
)
# return output
output = F.embedding(
input,
weight,
self.padding_idx,
self.max_norm,
self.norm_type,
self.scale_grad_by_freq,
self.sparse,
)
return output
def extra_repr(self):
s = "{num_embeddings}, {embedding_dim}"
if self.padding_idx is not None:
s += ", padding_idx={padding_idx}"
if self.max_norm is not None:
s += ", max_norm={max_norm}"
if self.norm_type != 2:
s += ", norm_type={norm_type}"
if self.scale_grad_by_freq is not False:
s += ", scale_grad_by_freq={scale_grad_by_freq}"
if self.sparse is not False:
s += ", sparse=True"
s += "quant_noise={p}, bits={bits}, method={method}"
return s.format(**self.__dict__)
|
COCO-LM/fairseq/fairseq/modules/quantization/scalar/modules/qemb.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/modules/quantization/scalar/modules/qemb.py",
"repo_id": "COCO-LM",
"token_count": 2331
}
| 193 |
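A minimal instantiation sketch; since emulate_int comes from fairseq's quantization ops, this only runs inside the repository, and the sizes are illustrative assumptions:

import torch
emb = IntEmbedding(num_embeddings=1000, embedding_dim=64, padding_idx=0,
                   p=0.5, bits=8, method="histogram")
tokens = torch.randint(0, 1000, (4, 16))
out = emb(tokens)  # in training, quant noise is applied to ~half the weights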
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import torch
logger = logging.getLogger(__name__)
class NanDetector:
"""
    Detects the first NaN or Inf in the forward and/or backward pass and logs it together with the module name
"""
def __init__(self, model, forward=True, backward=True):
self.bhooks = []
self.fhooks = []
self.forward = forward
self.backward = backward
self.named_parameters = list(model.named_parameters())
self.reset()
for name, mod in model.named_modules():
mod.__module_name = name
self.add_hooks(mod)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
# Dump out all model gnorms to enable better debugging
norm = {}
gradients = {}
for name, param in self.named_parameters:
if param.grad is not None:
grad_norm = torch.norm(param.grad.data, p=2, dtype=torch.float32)
norm[name] = grad_norm.item()
if torch.isnan(grad_norm).any() or torch.isinf(grad_norm).any():
gradients[name] = param.grad.data
if len(gradients) > 0:
logger.info("Detected nan/inf grad norm, dumping norms...")
logger.info(f"norms: {norm}")
logger.info(f"gradients: {gradients}")
self.close()
def add_hooks(self, module):
if self.forward:
self.fhooks.append(module.register_forward_hook(self.fhook_fn))
if self.backward:
self.bhooks.append(module.register_backward_hook(self.bhook_fn))
def reset(self):
self.has_printed_f = False
self.has_printed_b = False
def _detect(self, tensor, name, backward):
err = None
if (
torch.is_floating_point(tensor)
# single value tensors (like the loss) will not provide much info
and tensor.numel() >= 2
):
with torch.no_grad():
if torch.isnan(tensor).any():
err = "NaN"
elif torch.isinf(tensor).any():
err = "Inf"
if err is not None:
err = f"{err} detected in output of {name}, shape: {tensor.shape}, {'backward' if backward else 'forward'}"
return err
def _apply(self, module, inp, x, backward):
if torch.is_tensor(x):
if isinstance(inp, tuple) and len(inp) > 0:
inp = inp[0]
err = self._detect(x, module.__module_name, backward)
if err is not None:
if torch.is_tensor(inp) and not backward:
err += (
f" input max: {inp.max().item()}, input min: {inp.min().item()}"
)
has_printed_attr = "has_printed_b" if backward else "has_printed_f"
logger.warning(err)
setattr(self, has_printed_attr, True)
elif isinstance(x, dict):
for v in x.values():
self._apply(module, inp, v, backward)
elif isinstance(x, list) or isinstance(x, tuple):
for v in x:
self._apply(module, inp, v, backward)
def fhook_fn(self, module, inp, output):
if not self.has_printed_f:
self._apply(module, inp, output, backward=False)
def bhook_fn(self, module, inp, output):
if not self.has_printed_b:
self._apply(module, inp, output, backward=True)
def close(self):
for hook in self.fhooks + self.bhooks:
hook.remove()
|
COCO-LM/fairseq/fairseq/nan_detector.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/nan_detector.py",
"repo_id": "COCO-LM",
"token_count": 1799
}
| 194 |
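NanDetector registers forward/backward hooks on every submodule, so it is typically wrapped around a suspicious training step as a context manager. A hedged sketch with a toy model:

import torch
import torch.nn as nn
model = nn.Sequential(nn.Linear(8, 8), nn.ReLU())
with NanDetector(model):
    out = model(torch.randn(2, 8))
    out.sum().backward()
# on exit, any nan/inf gradient norms are dumped through the logger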
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""isort:skip_file"""
import importlib
import os
from fairseq import registry
from fairseq.optim.lr_scheduler.fairseq_lr_scheduler import ( # noqa
FairseqLRScheduler,
LegacyFairseqLRScheduler,
)
from omegaconf import DictConfig
(
build_lr_scheduler_,
register_lr_scheduler,
LR_SCHEDULER_REGISTRY,
LR_SCHEDULER_DATACLASS_REGISTRY,
) = registry.setup_registry(
"--lr-scheduler", base_class=FairseqLRScheduler, default="fixed"
)
def build_lr_scheduler(cfg: DictConfig, optimizer):
return build_lr_scheduler_(cfg, optimizer)
# automatically import any Python files in the optim/lr_scheduler/ directory
for file in os.listdir(os.path.dirname(__file__)):
if file.endswith(".py") and not file.startswith("_"):
file_name = file[: file.find(".py")]
importlib.import_module("fairseq.optim.lr_scheduler." + file_name)
|
COCO-LM/fairseq/fairseq/optim/lr_scheduler/__init__.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/optim/lr_scheduler/__init__.py",
"repo_id": "COCO-LM",
"token_count": 392
}
| 195 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from fairseq.modules.quantization import pq, quantization_options, scalar
from omegaconf import DictConfig
logger = logging.getLogger(__name__)
def quantize_model_scalar(model, model_cfg: DictConfig):
quant_noise_scalar = getattr(model_cfg, "quant_noise_scalar", 0) or 0
if quant_noise_scalar > 0:
# quantize_model edits the model in place
scalar.quantize_model_(model, p=quant_noise_scalar, bits=8, update_step=1000)
return model
class Quantizer(object):
def __init__(self, config_path, max_epoch, max_update):
try:
import yaml
except ImportError:
raise ImportError("Please install yaml with: pip install yaml")
# parse config
if config_path:
with open(config_path) as config_file:
config = quantization_options.parse_config_yaml(
yaml.safe_load(config_file)
)
else:
config = quantization_options.parse_config_yaml({})
self.n_centroids_config = config["n_centroids"]
self.block_sizes_config = config["block_sizes"]
self.layers_to_quantize = config["layers_to_quantize"]
# We assume that training will run for a fixed number of epochs
# (or updates) and that we should train for equal durations
# between iterations of PQ.
num_iterations = len(self.layers_to_quantize)
if max_epoch > 0:
assert max_epoch % num_iterations == 0, (
"for iterative PQ, --max-epoch (={}) must be evenly divisible by "
"len(layers_to_quantize) (={})".format(max_epoch, num_iterations)
)
self.epoch_schedule = max_epoch // num_iterations
else:
self.epoch_schedule = None
if max_update > 0:
assert max_update % num_iterations == 0, (
"for iterative PQ, --max-update (={}) must be evenly divisible by "
"len(layers_to_quantize) (={})".format(max_update, num_iterations)
)
self.update_schedule = max_update // num_iterations
else:
self.update_schedule = None
assert (self.epoch_schedule is not None) ^ (
self.update_schedule is not None
), "for iterative PQ, cannot specify both --max-update and --max-epoch"
# 0 is a special value for quantization step, which will force
# the first call to begin_epoch() to call step()
self.quantization_step = 0
def set_trainer(self, trainer):
self.trainer = trainer
self.size_tracker = pq.SizeTracker(self.trainer.get_model())
def step(self):
"""Move to the next stage of quantization."""
if self.quantization_step >= len(self.layers_to_quantize):
# Maybe we just finished the last training step or we loaded
# a checkpoint for an iterative PQ model which previously
# finished training. Either way, don't quantize again.
return
logger.info(
"quantizing model (step={}; layers_to_quantize[step]={})".format(
self.quantization_step, self.layers_to_quantize[self.quantization_step]
)
)
quantized_layers = pq.quantize_model_(
self.trainer.get_model(),
self.size_tracker,
self.layers_to_quantize,
self.block_sizes_config,
self.n_centroids_config,
step=self.quantization_step,
)
logger.info("quantized layers: {}".format(quantized_layers))
logger.info(self.size_tracker)
self.quantization_step += 1
        # reinitialize the Trainer since model parameters have changed
self.trainer.reinitialize()
def begin_epoch(self, epoch):
"""Called at the beginning of each epoch (epochs start at 1)."""
if (
(
self.epoch_schedule is not None
and epoch > 0
and (epoch - 1) % self.epoch_schedule == 0
)
# we always step once in the beginning, even if using
# update-based quantization
or self.quantization_step == 0
):
self.step()
def step_update(self, num_updates):
"""Called at the end of each step."""
if (
self.update_schedule is not None
and num_updates > 0
and num_updates % self.update_schedule == 0
):
self.step()
def state_dict(self):
return {
"n_centroids_config": self.n_centroids_config,
"block_sizes_config": self.block_sizes_config,
"layers_to_quantize": self.layers_to_quantize,
"epoch_schedule": self.epoch_schedule,
"update_schedule": self.update_schedule,
"quantization_step": self.quantization_step,
}
def load_state_dict(self, state_dict):
self.n_centroids_config = state_dict["n_centroids_config"]
self.block_sizes_config = state_dict["block_sizes_config"]
self.layers_to_quantize = state_dict["layers_to_quantize"]
self.epoch_schedule = state_dict["epoch_schedule"]
self.update_schedule = state_dict["update_schedule"]
self.quantization_step = state_dict["quantization_step"]
|
COCO-LM/fairseq/fairseq/quantization_utils.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/quantization_utils.py",
"repo_id": "COCO-LM",
"token_count": 2488
}
| 196 |
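For iterative PQ the training budget is split evenly across quantization stages, and begin_epoch() triggers step() whenever (epoch - 1) is a multiple of the schedule. A tiny standalone check of that arithmetic (numbers are illustrative):

max_epoch, num_iterations = 12, 3        # e.g. three layer groups to quantize
assert max_epoch % num_iterations == 0
epoch_schedule = max_epoch // num_iterations
steps = [e for e in range(1, max_epoch + 1) if (e - 1) % epoch_schedule == 0]
print(steps)  # [1, 5, 9] -> epochs at which a new group gets quantized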
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import logging
import os
import numpy as np
from fairseq import tokenizer, utils
from fairseq.data import ConcatDataset, Dictionary, data_utils, indexed_dataset
from fairseq.data.legacy.block_pair_dataset import BlockPairDataset
from fairseq.data.legacy.masked_lm_dataset import MaskedLMDataset
from fairseq.data.legacy.masked_lm_dictionary import BertDictionary
from fairseq.tasks import LegacyFairseqTask, register_task
logger = logging.getLogger(__name__)
@register_task("legacy_masked_lm")
class LegacyMaskedLMTask(LegacyFairseqTask):
"""
Task for training Masked LM (BERT) model.
Args:
dictionary (Dictionary): the dictionary for the input of the task
"""
@staticmethod
def add_args(parser):
"""Add task-specific arguments to the parser."""
parser.add_argument(
"data",
help="colon separated path to data directories list, \
will be iterated upon during epochs in round-robin manner",
)
parser.add_argument(
"--tokens-per-sample",
default=512,
type=int,
help="max number of total tokens over all segments"
" per sample for BERT dataset",
)
parser.add_argument(
"--break-mode", default="doc", type=str, help="mode for breaking sentence"
)
parser.add_argument("--shuffle-dataset", action="store_true", default=False)
def __init__(self, args, dictionary):
super().__init__(args)
self.dictionary = dictionary
self.seed = args.seed
@classmethod
def load_dictionary(cls, filename):
return BertDictionary.load(filename)
@classmethod
def build_dictionary(
cls, filenames, workers=1, threshold=-1, nwords=-1, padding_factor=8
):
d = BertDictionary()
for filename in filenames:
Dictionary.add_file_to_dictionary(
filename, d, tokenizer.tokenize_line, workers
)
d.finalize(threshold=threshold, nwords=nwords, padding_factor=padding_factor)
return d
@property
def target_dictionary(self):
return self.dictionary
@classmethod
def setup_task(cls, args, **kwargs):
"""Setup the task."""
paths = utils.split_paths(args.data)
assert len(paths) > 0
dictionary = BertDictionary.load(os.path.join(paths[0], "dict.txt"))
logger.info("dictionary: {} types".format(len(dictionary)))
return cls(args, dictionary)
def load_dataset(self, split, epoch=1, combine=False):
"""Load a given dataset split.
Args:
split (str): name of the split (e.g., train, valid, test)
"""
loaded_datasets = []
paths = utils.split_paths(self.args.data)
assert len(paths) > 0
data_path = paths[(epoch - 1) % len(paths)]
logger.info("data_path", data_path)
for k in itertools.count():
split_k = split + (str(k) if k > 0 else "")
path = os.path.join(data_path, split_k)
ds = indexed_dataset.make_dataset(
path,
impl=self.args.dataset_impl,
fix_lua_indexing=True,
dictionary=self.dictionary,
)
if ds is None:
if k > 0:
break
else:
raise FileNotFoundError(
"Dataset not found: {} ({})".format(split, data_path)
)
with data_utils.numpy_seed(self.seed + k):
loaded_datasets.append(
BlockPairDataset(
ds,
self.dictionary,
ds.sizes,
self.args.tokens_per_sample,
break_mode=self.args.break_mode,
doc_break_size=1,
)
)
logger.info(
"{} {} {} examples".format(data_path, split_k, len(loaded_datasets[-1]))
)
if not combine:
break
if len(loaded_datasets) == 1:
dataset = loaded_datasets[0]
sizes = dataset.sizes
else:
dataset = ConcatDataset(loaded_datasets)
sizes = np.concatenate([ds.sizes for ds in loaded_datasets])
self.datasets[split] = MaskedLMDataset(
dataset=dataset,
sizes=sizes,
vocab=self.dictionary,
pad_idx=self.dictionary.pad(),
mask_idx=self.dictionary.mask(),
classif_token_idx=self.dictionary.cls(),
sep_token_idx=self.dictionary.sep(),
shuffle=self.args.shuffle_dataset,
seed=self.seed,
)
|
COCO-LM/fairseq/fairseq/tasks/legacy_masked_lm.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/tasks/legacy_masked_lm.py",
"repo_id": "COCO-LM",
"token_count": 2454
}
| 197 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import re
SPACE_NORMALIZER = re.compile(r"\s+")
def tokenize_line(line):
line = SPACE_NORMALIZER.sub(" ", line)
line = line.strip()
return line.split()
|
COCO-LM/fairseq/fairseq/tokenizer.py/0
|
{
"file_path": "COCO-LM/fairseq/fairseq/tokenizer.py",
"repo_id": "COCO-LM",
"token_count": 116
}
| 198 |
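tokenize_line collapses any run of whitespace to a single space and strips the ends before splitting, for example:

print(tokenize_line("  hello\t world\n"))  # ['hello', 'world']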
#ifndef TORCH_CHECK
#define TORCH_CHECK AT_CHECK
#endif
#ifdef VERSION_GE_1_3
#define DATA_PTR data_ptr
#else
#define DATA_PTR data
#endif
|
COCO-LM/fairseq/fused_ops/csrc/compat.h/0
|
{
"file_path": "COCO-LM/fairseq/fused_ops/csrc/compat.h",
"repo_id": "COCO-LM",
"token_count": 59
}
| 199 |
pip install --user --editable .
pip install --user sentencepiece
if [ -d fused_ops ]
then
pip install --user --editable fused_ops
fi
|
COCO-LM/fairseq/install.sh/0
|
{
"file_path": "COCO-LM/fairseq/install.sh",
"repo_id": "COCO-LM",
"token_count": 47
}
| 200 |
-- Copyright (c) Facebook, Inc. and its affiliates.
--
-- This source code is licensed under the MIT license found in the
-- LICENSE file in the root directory of this source tree.
--
-- Usage: convert_model.lua <model_epoch1.th7>
require 'torch'
local fairseq = require 'fairseq'
model = torch.load(arg[1])
function find_weight_norm(container, module)
for _, wn in ipairs(container:listModules()) do
if torch.type(wn) == 'nn.WeightNorm' and wn.modules[1] == module then
return wn
end
end
end
function push_state(dict, key, module)
if torch.type(module) == 'nn.Linear' then
local wn = find_weight_norm(model.module, module)
assert(wn)
dict[key .. '.weight_v'] = wn.v:float()
dict[key .. '.weight_g'] = wn.g:float()
elseif torch.type(module) == 'nn.TemporalConvolutionTBC' then
local wn = find_weight_norm(model.module, module)
assert(wn)
local v = wn.v:float():view(wn.viewOut):transpose(2, 3)
dict[key .. '.weight_v'] = v
dict[key .. '.weight_g'] = wn.g:float():view(module.weight:size(3), 1, 1)
else
dict[key .. '.weight'] = module.weight:float()
end
if module.bias then
dict[key .. '.bias'] = module.bias:float()
end
end
encoder_dict = {}
decoder_dict = {}
combined_dict = {}
function encoder_state(encoder)
luts = encoder:findModules('nn.LookupTable')
push_state(encoder_dict, 'embed_tokens', luts[1])
push_state(encoder_dict, 'embed_positions', luts[2])
fcs = encoder:findModules('nn.Linear')
assert(#fcs >= 2)
local nInputPlane = fcs[1].weight:size(1)
push_state(encoder_dict, 'fc1', table.remove(fcs, 1))
push_state(encoder_dict, 'fc2', table.remove(fcs, #fcs))
for i, module in ipairs(encoder:findModules('nn.TemporalConvolutionTBC')) do
push_state(encoder_dict, 'convolutions.' .. tostring(i - 1), module)
if nInputPlane ~= module.weight:size(3) / 2 then
push_state(encoder_dict, 'projections.' .. tostring(i - 1), table.remove(fcs, 1))
end
nInputPlane = module.weight:size(3) / 2
end
assert(#fcs == 0)
end
function decoder_state(decoder)
luts = decoder:findModules('nn.LookupTable')
push_state(decoder_dict, 'embed_tokens', luts[1])
push_state(decoder_dict, 'embed_positions', luts[2])
fcs = decoder:findModules('nn.Linear')
local nInputPlane = fcs[1].weight:size(1)
push_state(decoder_dict, 'fc1', table.remove(fcs, 1))
push_state(decoder_dict, 'fc2', fcs[#fcs - 1])
push_state(decoder_dict, 'fc3', fcs[#fcs])
table.remove(fcs, #fcs)
table.remove(fcs, #fcs)
for i, module in ipairs(decoder:findModules('nn.TemporalConvolutionTBC')) do
if nInputPlane ~= module.weight:size(3) / 2 then
push_state(decoder_dict, 'projections.' .. tostring(i - 1), table.remove(fcs, 1))
end
nInputPlane = module.weight:size(3) / 2
local prefix = 'attention.' .. tostring(i - 1)
push_state(decoder_dict, prefix .. '.in_projection', table.remove(fcs, 1))
push_state(decoder_dict, prefix .. '.out_projection', table.remove(fcs, 1))
push_state(decoder_dict, 'convolutions.' .. tostring(i - 1), module)
end
assert(#fcs == 0)
end
_encoder = model.module.modules[2]
_decoder = model.module.modules[3]
encoder_state(_encoder)
decoder_state(_decoder)
for k, v in pairs(encoder_dict) do
combined_dict['encoder.' .. k] = v
end
for k, v in pairs(decoder_dict) do
combined_dict['decoder.' .. k] = v
end
torch.save('state_dict.t7', combined_dict)
|
COCO-LM/fairseq/scripts/convert_model.lua/0
|
{
"file_path": "COCO-LM/fairseq/scripts/convert_model.lua",
"repo_id": "COCO-LM",
"token_count": 1368
}
| 201 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from torch import nn
from fairseq.distributed import ModuleProxyWrapper
from .utils import objects_are_equal
class MockDDPWrapper(nn.Module):
"""A simple wrapper with an interface similar to DistributedDataParallel."""
def __init__(self, module):
super().__init__()
self.module = module
def forward(self, x):
return self.module(x)
class Model(nn.Module):
def __init__(self):
super().__init__()
self.linear = nn.Linear(5, 10)
self.xyz = "hello"
def forward(self, x):
return self.linear(x)
def get_xyz(self):
return self.xyz
class TestModuleProxyWrapper(unittest.TestCase):
def _get_module(self):
module = Model()
wrapped_module = MockDDPWrapper(module)
wrapped_module = ModuleProxyWrapper(wrapped_module)
return wrapped_module, module
def test_getattr_forwarding(self):
wrapped_module, module = self._get_module()
assert module.xyz == "hello"
assert module.get_xyz() == "hello"
assert wrapped_module.xyz == "hello"
wrapped_module.xyz = "world"
assert wrapped_module.xyz == "world"
assert module.get_xyz() == "hello"
def test_state_dict(self):
wrapped_module, module = self._get_module()
assert objects_are_equal(wrapped_module.state_dict(), module.state_dict())
def test_load_state_dict(self):
wrapped_module, module = self._get_module()
wrapped_module.load_state_dict(module.state_dict())
input = torch.rand(4, 5)
torch.testing.assert_allclose(wrapped_module(input), module(input))
def test_forward(self):
wrapped_module, module = self._get_module()
input = torch.rand(4, 5)
torch.testing.assert_allclose(wrapped_module(input), module(input))
if __name__ == "__main__":
unittest.main()
|
COCO-LM/fairseq/tests/distributed/test_module_proxy_wrapper.py/0
|
{
"file_path": "COCO-LM/fairseq/tests/distributed/test_module_proxy_wrapper.py",
"repo_id": "COCO-LM",
"token_count": 817
}
| 202 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.data import Dictionary
from fairseq.modules import CharacterTokenEmbedder
class TestCharacterTokenEmbedder(unittest.TestCase):
def test_character_token_embedder(self):
vocab = Dictionary()
vocab.add_symbol("hello")
vocab.add_symbol("there")
embedder = CharacterTokenEmbedder(
vocab, [(2, 16), (4, 32), (8, 64), (16, 2)], 64, 5, 2
)
test_sents = [["hello", "unk", "there"], ["there"], ["hello", "there"]]
max_len = max(len(s) for s in test_sents)
input = torch.LongTensor(len(test_sents), max_len + 2).fill_(vocab.pad())
for i in range(len(test_sents)):
input[i][0] = vocab.eos()
for j in range(len(test_sents[i])):
input[i][j + 1] = vocab.index(test_sents[i][j])
input[i][j + 2] = vocab.eos()
embs = embedder(input)
assert embs.size() == (len(test_sents), max_len + 2, 5)
self.assertAlmostEqual(embs[0][0], embs[1][0])
self.assertAlmostEqual(embs[0][0], embs[0][-1])
self.assertAlmostEqual(embs[0][1], embs[2][1])
self.assertAlmostEqual(embs[0][3], embs[1][1])
embs.sum().backward()
assert embedder.char_embeddings.weight.grad is not None
def assertAlmostEqual(self, t1, t2):
self.assertEqual(t1.size(), t2.size(), "size mismatch")
self.assertLess((t1 - t2).abs().max(), 1e-6)
if __name__ == "__main__":
unittest.main()
|
COCO-LM/fairseq/tests/test_character_token_embedder.py/0
|
{
"file_path": "COCO-LM/fairseq/tests/test_character_token_embedder.py",
"repo_id": "COCO-LM",
"token_count": 757
}
| 203 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import tempfile
import unittest
import torch
from fairseq.data.dictionary import Dictionary
from fairseq.models.lstm import LSTMModel
from fairseq.tasks.fairseq_task import LegacyFairseqTask
DEFAULT_TEST_VOCAB_SIZE = 100
class DummyTask(LegacyFairseqTask):
def __init__(self, args):
super().__init__(args)
self.dictionary = get_dummy_dictionary()
if getattr(self.args, "ctc", False):
self.dictionary.add_symbol("<ctc_blank>")
self.src_dict = self.dictionary
self.tgt_dict = self.dictionary
@property
def source_dictionary(self):
return self.src_dict
@property
def target_dictionary(self):
return self.dictionary
def get_dummy_dictionary(vocab_size=DEFAULT_TEST_VOCAB_SIZE):
dummy_dict = Dictionary()
# add dummy symbol to satisfy vocab size
for id, _ in enumerate(range(vocab_size)):
dummy_dict.add_symbol("{}".format(id), 1000)
return dummy_dict
def get_dummy_task_and_parser():
"""
    To build a fairseq model we need a dummy parser and task. This function
    creates them to facilitate model/criterion tests.
    Note: DummyTask is used as the dummy task here; you may want to use
    another task by providing a different function.
"""
parser = argparse.ArgumentParser(
description="test_dummy_s2s_task", argument_default=argparse.SUPPRESS
)
DummyTask.add_args(parser)
args = parser.parse_args([])
task = DummyTask.setup_task(args)
return task, parser
class TestJitLSTMModel(unittest.TestCase):
def _test_save_and_load(self, scripted_module):
with tempfile.NamedTemporaryFile() as f:
scripted_module.save(f.name)
torch.jit.load(f.name)
def assertTensorEqual(self, t1, t2):
t1 = t1[~torch.isnan(t1)] # can cause size mismatch errors if there are NaNs
t2 = t2[~torch.isnan(t2)]
self.assertEqual(t1.size(), t2.size(), "size mismatch")
self.assertEqual(t1.ne(t2).long().sum(), 0)
def test_jit_and_export_lstm(self):
task, parser = get_dummy_task_and_parser()
LSTMModel.add_args(parser)
args = parser.parse_args([])
args.criterion = ""
model = LSTMModel.build_model(args, task)
scripted_model = torch.jit.script(model)
self._test_save_and_load(scripted_model)
def test_assert_jit_vs_nonjit_(self):
task, parser = get_dummy_task_and_parser()
LSTMModel.add_args(parser)
args = parser.parse_args([])
args.criterion = ""
model = LSTMModel.build_model(args, task)
model.eval()
scripted_model = torch.jit.script(model)
scripted_model.eval()
idx = len(task.source_dictionary)
iter = 100
# Inject random input and check output
seq_len_tensor = torch.randint(1, 10, (iter,))
num_samples_tensor = torch.randint(1, 10, (iter,))
for i in range(iter):
seq_len = seq_len_tensor[i]
num_samples = num_samples_tensor[i]
src_token = (torch.randint(0, idx, (num_samples, seq_len)),)
src_lengths = torch.randint(1, seq_len + 1, (num_samples,))
src_lengths, _ = torch.sort(src_lengths, descending=True)
# Force the first sample to have seq_len
src_lengths[0] = seq_len
prev_output_token = (torch.randint(0, idx, (num_samples, 1)),)
result = model(src_token[0], src_lengths, prev_output_token[0], None)
scripted_result = scripted_model(
src_token[0], src_lengths, prev_output_token[0], None
)
self.assertTensorEqual(result[0], scripted_result[0])
self.assertTensorEqual(result[1], scripted_result[1])
if __name__ == "__main__":
unittest.main()
|
COCO-LM/fairseq/tests/test_lstm_jitable.py/0
|
{
"file_path": "COCO-LM/fairseq/tests/test_lstm_jitable.py",
"repo_id": "COCO-LM",
"token_count": 1747
}
| 204 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import json
import os
import random
import sys
from io import StringIO
import torch
import torch.nn.functional as F
from fairseq import options, utils
from fairseq.data import Dictionary
from fairseq.data.language_pair_dataset import collate
from fairseq.models import (
FairseqEncoder,
FairseqEncoderDecoderModel,
FairseqIncrementalDecoder,
)
from fairseq.models.fairseq_encoder import EncoderOut
from fairseq.tasks import LegacyFairseqTask
from fairseq_cli import generate, interactive, preprocess, train, validate
def dummy_dictionary(vocab_size, prefix="token_"):
d = Dictionary()
for i in range(vocab_size):
token = prefix + str(i)
d.add_symbol(token)
d.finalize(padding_factor=1) # don't add extra padding symbols
return d
def dummy_dataloader(
samples,
padding_idx=1,
eos_idx=2,
batch_size=None,
):
if batch_size is None:
batch_size = len(samples)
# add any missing data to samples
for i, sample in enumerate(samples):
if "id" not in sample:
sample["id"] = i
# create dataloader
dataset = TestDataset(samples)
dataloader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
collate_fn=(lambda samples: collate(samples, padding_idx, eos_idx)),
)
return iter(dataloader)
def sequence_generator_setup():
# construct dummy dictionary
d = dummy_dictionary(vocab_size=2)
eos = d.eos()
w1 = 4
w2 = 5
# construct source data
src_tokens = torch.LongTensor([[w1, w2, eos], [w1, w2, eos]])
src_lengths = torch.LongTensor([2, 2])
args = argparse.Namespace()
unk = 0.0
args.beam_probs = [
# step 0:
torch.FloatTensor(
[
# eos w1 w2
# sentence 1:
[0.0, unk, 0.9, 0.1], # beam 1
[0.0, unk, 0.9, 0.1], # beam 2
# sentence 2:
[0.0, unk, 0.7, 0.3],
[0.0, unk, 0.7, 0.3],
]
),
# step 1:
torch.FloatTensor(
[
# eos w1 w2 prefix
# sentence 1:
[1.0, unk, 0.0, 0.0], # w1: 0.9 (emit: w1 <eos>: 0.9*1.0)
[0.0, unk, 0.9, 0.1], # w2: 0.1
# sentence 2:
[0.25, unk, 0.35, 0.4], # w1: 0.7 (don't emit: w1 <eos>: 0.7*0.25)
[0.00, unk, 0.10, 0.9], # w2: 0.3
]
),
# step 2:
torch.FloatTensor(
[
# eos w1 w2 prefix
# sentence 1:
[0.0, unk, 0.1, 0.9], # w2 w1: 0.1*0.9
[
0.6,
unk,
0.2,
0.2,
], # w2 w2: 0.1*0.1 (emit: w2 w2 <eos>: 0.1*0.1*0.6)
# sentence 2:
[
0.60,
unk,
0.4,
0.00,
], # w1 w2: 0.7*0.4 (emit: w1 w2 <eos>: 0.7*0.4*0.6)
[0.01, unk, 0.0, 0.99], # w2 w2: 0.3*0.9
]
),
# step 3:
torch.FloatTensor(
[
# eos w1 w2 prefix
# sentence 1:
[
1.0,
unk,
0.0,
0.0,
], # w2 w1 w2: 0.1*0.9*0.9 (emit: w2 w1 w2 <eos>: 0.1*0.9*0.9*1.0)
[
1.0,
unk,
0.0,
0.0,
], # w2 w1 w1: 0.1*0.9*0.1 (emit: w2 w1 w1 <eos>: 0.1*0.9*0.1*1.0)
# sentence 2:
[
0.1,
unk,
0.5,
0.4,
], # w2 w2 w2: 0.3*0.9*0.99 (emit: w2 w2 w2 <eos>: 0.3*0.9*0.99*0.1)
[
1.0,
unk,
0.0,
0.0,
], # w1 w2 w1: 0.7*0.4*0.4 (emit: w1 w2 w1 <eos>: 0.7*0.4*0.4*1.0)
]
),
]
task = TestTranslationTask.setup_task(args, d, d)
model = task.build_model(args)
tgt_dict = task.target_dictionary
return tgt_dict, w1, w2, src_tokens, src_lengths, model
def create_dummy_data(data_dir, num_examples=100, maxlen=20, alignment=False):
def _create_dummy_data(filename):
data = torch.rand(num_examples * maxlen)
data = 97 + torch.floor(26 * data).int()
with open(os.path.join(data_dir, filename), "w") as h:
offset = 0
for _ in range(num_examples):
ex_len = random.randint(1, maxlen)
ex_str = " ".join(map(chr, data[offset : offset + ex_len]))
print(ex_str, file=h)
offset += ex_len
def _create_dummy_alignment_data(filename_src, filename_tgt, filename):
with open(os.path.join(data_dir, filename_src), "r") as src_f, open(
os.path.join(data_dir, filename_tgt), "r"
) as tgt_f, open(os.path.join(data_dir, filename), "w") as h:
for src, tgt in zip(src_f, tgt_f):
src_len = len(src.split())
tgt_len = len(tgt.split())
avg_len = (src_len + tgt_len) // 2
num_alignments = random.randint(avg_len // 2, 2 * avg_len)
src_indices = torch.floor(torch.rand(num_alignments) * src_len).int()
tgt_indices = torch.floor(torch.rand(num_alignments) * tgt_len).int()
ex_str = " ".join(
[
"{}-{}".format(src, tgt)
for src, tgt in zip(src_indices, tgt_indices)
]
)
print(ex_str, file=h)
_create_dummy_data("train.in")
_create_dummy_data("train.out")
_create_dummy_data("valid.in")
_create_dummy_data("valid.out")
_create_dummy_data("test.in")
_create_dummy_data("test.out")
if alignment:
_create_dummy_alignment_data("train.in", "train.out", "train.align")
_create_dummy_alignment_data("valid.in", "valid.out", "valid.align")
_create_dummy_alignment_data("test.in", "test.out", "test.align")
def preprocess_lm_data(data_dir):
preprocess_parser = options.get_preprocessing_parser()
preprocess_args = preprocess_parser.parse_args(
[
"--only-source",
"--trainpref",
os.path.join(data_dir, "train.out"),
"--validpref",
os.path.join(data_dir, "valid.out"),
"--testpref",
os.path.join(data_dir, "test.out"),
"--destdir",
data_dir,
]
)
preprocess.main(preprocess_args)
def preprocess_translation_data(data_dir, extra_flags=None):
preprocess_parser = options.get_preprocessing_parser()
preprocess_args = preprocess_parser.parse_args(
[
"--source-lang",
"in",
"--target-lang",
"out",
"--trainpref",
os.path.join(data_dir, "train"),
"--validpref",
os.path.join(data_dir, "valid"),
"--testpref",
os.path.join(data_dir, "test"),
"--thresholdtgt",
"0",
"--thresholdsrc",
"0",
"--destdir",
data_dir,
]
+ (extra_flags or []),
)
preprocess.main(preprocess_args)
def preprocess_summarization_data(data_dir, extra_flags=None):
preprocess_parser = options.get_preprocessing_parser()
preprocess_args = preprocess_parser.parse_args(
[
"--source-lang",
"in",
"--target-lang",
"out",
"--trainpref",
os.path.join(data_dir, "train"),
"--validpref",
os.path.join(data_dir, "valid"),
"--testpref",
os.path.join(data_dir, "test"),
"--thresholdtgt",
"0",
"--thresholdsrc",
"0",
"--joined-dictionary",
"--destdir",
data_dir,
]
+ (extra_flags or []),
)
preprocess.main(preprocess_args)
def create_laser_data_and_config_json(data_dir):
src_langs = ["de", "fr", "ru", "tr", "zh"]
tgt_langs = ["en", "es"]
config_json = {}
config_train_json = []
src_vocab = None
tgt_vocab = None
for src_lang in src_langs:
for tgt_lang in tgt_langs:
langpair_folder = f"{src_lang}-{tgt_lang}"
langpair_path = os.path.join(data_dir, langpair_folder)
os.mkdir(langpair_path)
create_dummy_data(langpair_path)
preprocess_translation_data(langpair_path, ["--dataset-impl", "cached"])
src_vocab = os.path.join(langpair_path, "dict.in.txt")
tgt_vocab = os.path.join(langpair_path, "dict.out.txt")
config_train_json.append(
{
"id": 0 if tgt_lang == "en" else 1,
"src": os.path.join(langpair_path, "train.in-out.in"),
"tgt": os.path.join(langpair_path, "train.in-out.out"),
}
)
config_json["src_vocab"] = src_vocab
config_json["tgt_vocab"] = tgt_vocab
config_json["train"] = config_train_json
with open(os.path.join(data_dir, "laserconfig.json"), "w") as config_file:
json.dump(config_json, config_file)
return config_file
def train_translation_model(
data_dir,
arch,
extra_flags=None,
task="translation",
run_validation=False,
lang_flags=None,
extra_valid_flags=None,
):
if lang_flags is None:
lang_flags = [
"--source-lang",
"in",
"--target-lang",
"out",
]
train_parser = options.get_training_parser()
train_args = options.parse_args_and_arch(
train_parser,
[
"--task",
task,
data_dir,
"--save-dir",
data_dir,
"--arch",
arch,
"--optimizer",
"nag",
"--lr",
"0.05",
"--max-tokens",
"500",
"--max-epoch",
"1",
"--no-progress-bar",
"--distributed-world-size",
"1",
"--num-workers",
"0",
]
+ lang_flags
+ (extra_flags or []),
)
train.main(train_args)
if run_validation:
# test validation
validate_parser = options.get_validation_parser()
validate_args = options.parse_args_and_arch(
validate_parser,
[
"--task",
task,
data_dir,
"--path",
os.path.join(data_dir, "checkpoint_last.pt"),
"--valid-subset",
"valid",
"--max-tokens",
"500",
"--no-progress-bar",
"--num-workers",
"0",
]
+ lang_flags
+ (extra_valid_flags or []),
)
validate.main(validate_args)
def generate_main(data_dir, extra_flags=None, path=None):
if extra_flags is None:
extra_flags = [
"--print-alignment",
]
if path is None:
path = os.path.join(data_dir, "checkpoint_last.pt")
generate_parser = options.get_generation_parser()
generate_args = options.parse_args_and_arch(
generate_parser,
[
data_dir,
"--path",
path,
"--beam",
"3",
"--batch-size",
"64",
"--max-len-b",
"5",
"--gen-subset",
"valid",
"--no-progress-bar",
"--num-workers",
"0",
]
+ (extra_flags or []),
)
# evaluate model in batch mode
generate.main(generate_args)
# evaluate model interactively
generate_args.buffer_size = 0
generate_args.input = "-"
generate_args.batch_size = None
orig_stdin = sys.stdin
sys.stdin = StringIO("h e l l o\n")
interactive.main(generate_args)
sys.stdin = orig_stdin
class TestDataset(torch.utils.data.Dataset):
def __init__(self, data):
super().__init__()
self.data = data
self.sizes = None
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return len(self.data)
class TestTranslationTask(LegacyFairseqTask):
def __init__(self, args, src_dict, tgt_dict, model):
super().__init__(args)
self.src_dict = src_dict
self.tgt_dict = tgt_dict
self.model = model
@classmethod
def setup_task(cls, args, src_dict=None, tgt_dict=None, model=None):
return cls(args, src_dict, tgt_dict, model)
def build_model(self, args):
return TestModel.build_model(args, self)
@property
def source_dictionary(self):
return self.src_dict
@property
def target_dictionary(self):
return self.tgt_dict
class TestModel(FairseqEncoderDecoderModel):
def __init__(self, encoder, decoder):
super().__init__(encoder, decoder)
@classmethod
def build_model(cls, args, task):
encoder = TestEncoder(args, task.source_dictionary)
decoder = TestIncrementalDecoder(args, task.target_dictionary)
return cls(encoder, decoder)
class TestEncoder(FairseqEncoder):
def __init__(self, args, dictionary):
super().__init__(dictionary)
self.args = args
def forward(self, src_tokens, src_lengths=None, **kwargs):
return EncoderOut(
encoder_out=src_tokens,
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
def reorder_encoder_out(self, encoder_out, new_order):
return EncoderOut(
encoder_out=encoder_out.encoder_out.index_select(0, new_order),
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
class TestIncrementalDecoder(FairseqIncrementalDecoder):
def __init__(self, args, dictionary):
super().__init__(dictionary)
assert hasattr(args, "beam_probs") or hasattr(args, "probs")
args.max_decoder_positions = getattr(args, "max_decoder_positions", 100)
self.args = args
def forward(self, prev_output_tokens, encoder_out=None, incremental_state=None):
if incremental_state is not None:
prev_output_tokens = prev_output_tokens[:, -1:]
bbsz = prev_output_tokens.size(0)
vocab = len(self.dictionary)
src_len = encoder_out.encoder_out.size(1)
tgt_len = prev_output_tokens.size(1)
# determine number of steps
if incremental_state is not None:
# cache step number
step = utils.get_incremental_state(self, incremental_state, "step")
if step is None:
step = 0
utils.set_incremental_state(self, incremental_state, "step", step + 1)
steps = [step]
else:
steps = list(range(tgt_len))
# define output in terms of raw probs
if hasattr(self.args, "probs"):
assert (
self.args.probs.dim() == 3
), "expected probs to have size bsz*steps*vocab"
probs = self.args.probs.index_select(1, torch.LongTensor(steps))
else:
probs = torch.FloatTensor(bbsz, len(steps), vocab).zero_()
for i, step in enumerate(steps):
# args.beam_probs gives the probability for every vocab element,
# starting with eos, then unknown, and then the rest of the vocab
if step < len(self.args.beam_probs):
probs[:, i, self.dictionary.eos() :] = self.args.beam_probs[step]
else:
probs[:, i, self.dictionary.eos()] = 1.0
# random attention
attn = torch.rand(bbsz, tgt_len, src_len)
dev = prev_output_tokens.device
return probs.to(dev), {"attn": [attn.to(dev)]}
def get_normalized_probs(self, net_output, log_probs, _):
# the decoder returns probabilities directly
probs = net_output[0]
if log_probs:
return probs.log()
else:
return probs
def max_positions(self):
return self.args.max_decoder_positions
class TestReshapingEncoder(FairseqEncoder):
def __init__(self, args, dictionary):
super().__init__(dictionary)
self.args = args
def forward(self, src_tokens, src_lengths=None, **kwargs):
b_sz, t_sz = src_tokens.shape
padding_needed = t_sz % 2
x = src_tokens
if padding_needed > 0:
padding_needed = 2 - padding_needed
x = F.pad(x, (0, padding_needed))
return EncoderOut(
encoder_out=x.view(b_sz, -1, 2),
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
def reorder_encoder_out(self, encoder_out, new_order):
return EncoderOut(
encoder_out=encoder_out.encoder_out.index_select(0, new_order),
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
class TestReshapingModel(FairseqEncoderDecoderModel):
def __init__(self, encoder, decoder):
super().__init__(encoder, decoder)
@classmethod
def build_model(cls, args, task):
encoder = TestReshapingEncoder(args, task.source_dictionary)
decoder = TestIncrementalDecoder(args, task.target_dictionary)
return cls(encoder, decoder)
class TestAdditionalInputEncoder(FairseqEncoder):
def __init__(self, args, dictionary):
super().__init__(dictionary)
self.args = args
def forward(self, src_tokens, src_lengths=None, **kwargs):
assert "fancy_other_input" in kwargs
assert kwargs["fancy_other_input"] is not None
return EncoderOut(
encoder_out=src_tokens,
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
def reorder_encoder_out(self, encoder_out, new_order):
return EncoderOut(
encoder_out=encoder_out.encoder_out.index_select(0, new_order),
encoder_padding_mask=None,
encoder_embedding=None,
encoder_states=None,
src_tokens=None,
src_lengths=None,
)
class TestAdditionalInputModel(FairseqEncoderDecoderModel):
def __init__(self, encoder, decoder):
super().__init__(encoder, decoder)
@classmethod
def build_model(cls, args, task):
encoder = TestAdditionalInputEncoder(args, task.source_dictionary)
decoder = TestIncrementalDecoder(args, task.target_dictionary)
return cls(encoder, decoder)
def forward(self, src_tokens, src_lengths, prev_output_tokens, **kwargs):
encoder_out = self.encoder(src_tokens, src_lengths=src_lengths, **kwargs)
decoder_out = self.decoder(
prev_output_tokens, encoder_out=encoder_out, **kwargs
)
return decoder_out
|
COCO-LM/fairseq/tests/utils.py/0
|
{
"file_path": "COCO-LM/fairseq/tests/utils.py",
"repo_id": "COCO-LM",
"token_count": 10743
}
| 205 |
#!/bin/bash
pip install --user bcolz mxnet tensorboardX matplotlib easydict opencv-python einops --no-cache-dir -U | cat
pip install --user scikit-image imgaug PyTurboJPEG --no-cache-dir -U | cat
pip install --user scikit-learn --no-cache-dir -U | cat
pip install torch==1.7.1+cu110 torchvision==0.8.2+cu110 -f https://download.pytorch.org/whl/torch_stable.html --no-cache-dir -U | cat
pip install --user termcolor imgaug prettytable --no-cache-dir -U | cat
pip install --user timm==0.3.4 --no-cache-dir -U | cat
pip install mmcv-full==1.3.0 --user --no-cache-dir -U | cat
|
CSWin-Transformer/segmentation/install_req.sh/0
|
{
"file_path": "CSWin-Transformer/segmentation/install_req.sh",
"repo_id": "CSWin-Transformer",
"token_count": 225
}
| 206 |
default_language_version:
python: python3
ci:
autofix_prs: true
autoupdate_commit_msg: "[pre-commit.ci] pre-commit suggestions"
autoupdate_schedule: quarterly
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
# list of supported hooks: https://pre-commit.com/hooks.html
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-case-conflict
- id: debug-statements
- id: detect-private-key
- id: check-added-large-files
args: ["--maxkb=5000", "--enforce-all"]
exclude: |
(?x)^(
)$
- repo: https://github.com/asottile/pyupgrade
rev: v2.37.1
hooks:
- id: pyupgrade
args: [--py37-plus]
name: Upgrade code
# python formatting
- repo: https://github.com/psf/black
rev: 22.6.0
hooks:
- id: black
name: Format code
args: ["--line-length=120"]
- repo: https://github.com/hadialqattan/pycln
rev: v2.0.4 # Possible releases: https://github.com/hadialqattan/pycln/releases
hooks:
- id: pycln
args: [--all]
  # ref: https://github.com/microsoft/vscode-isort
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
name: isort (python)
args: [--profile, "black"]
|
ClimaX/.pre-commit-config.yaml/0
|
{
"file_path": "ClimaX/.pre-commit-config.yaml",
"repo_id": "ClimaX",
"token_count": 616
}
| 207 |
datadir: /data/CMIP6/AWI-ESM
name: 10m_v_component_of_wind
cmip_name: vas
era_name: v10
run: r1i1p1f1
res:
- 1.40625
# - 5.625
|
ClimaX/snakemake_configs/AWI-ESM/config_10m_v_component_of_wind.yml/0
|
{
"file_path": "ClimaX/snakemake_configs/AWI-ESM/config_10m_v_component_of_wind.yml",
"repo_id": "ClimaX",
"token_count": 71
}
| 208 |
datadir: /data/CMIP6/HAMMOZ
name: geopotential
cmip_name: zg
era_name: z
run: r1i1p1f1
version: v20190628
res:
- 1.40625
# - 5.625
|
ClimaX/snakemake_configs/HAMMOZ/config_geopotential.yml/0
|
{
"file_path": "ClimaX/snakemake_configs/HAMMOZ/config_geopotential.yml",
"repo_id": "ClimaX",
"token_count": 70
}
| 209 |
datadir: /data/CMIP6/TaiESM1
server_prefix: https://esgf.ceda.ac.uk/thredds/fileServer/esg_cmip6/CMIP6/CMIP
name: geopotential
cmip_name: zg
era_name: z
run: r1i1p1f1
res:
- 1.40625
# - 5.625
|
ClimaX/snakemake_configs/TaiESM1/config_geopotential.yml/0
|
{
"file_path": "ClimaX/snakemake_configs/TaiESM1/config_geopotential.yml",
"repo_id": "ClimaX",
"token_count": 102
}
| 210 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from climax.global_forecast.datamodule import GlobalForecastDataModule
from climax.global_forecast.module import GlobalForecastModule
from pytorch_lightning.cli import LightningCLI
def main():
# Initialize Lightning with the model and data modules, and instruct it to parse the config yml
cli = LightningCLI(
model_class=GlobalForecastModule,
datamodule_class=GlobalForecastDataModule,
seed_everything_default=42,
save_config_overwrite=True,
run=False,
auto_registry=True,
parser_kwargs={"parser_mode": "omegaconf", "error_handler": None},
)
os.makedirs(cli.trainer.default_root_dir, exist_ok=True)
normalization = cli.datamodule.output_transforms
mean_norm, std_norm = normalization.mean, normalization.std
mean_denorm, std_denorm = -mean_norm / std_norm, 1 / std_norm
cli.model.set_denormalization(mean_denorm, std_denorm)
cli.model.set_lat_lon(*cli.datamodule.get_lat_lon())
cli.model.set_pred_range(cli.datamodule.hparams.predict_range)
cli.model.set_val_clim(cli.datamodule.val_clim)
cli.model.set_test_clim(cli.datamodule.test_clim)
# fit() runs the training
cli.trainer.fit(cli.model, datamodule=cli.datamodule)
# test the trained model
cli.trainer.test(cli.model, datamodule=cli.datamodule, ckpt_path="best")
if __name__ == "__main__":
main()
|
ClimaX/src/climax/global_forecast/train.py/0
|
{
"file_path": "ClimaX/src/climax/global_forecast/train.py",
"repo_id": "ClimaX",
"token_count": 579
}
| 211 |
import glob
import os
import click
import numpy as np
import xarray as xr
from tqdm import tqdm
from climax.utils.data_utils import DEFAULT_PRESSURE_LEVELS, NAME_TO_VAR
def extract_one_year(path, year, variables, len_to_extract, np_vars, normalize_mean, normalize_std):
for var in variables:
ps = glob.glob(os.path.join(path, var, f"*{year}*.nc"))
ds = xr.open_mfdataset(ps, combine="by_coords", parallel=True) # dataset for a single variable
code = NAME_TO_VAR[var]
lat = ds.lat.values
lon = ds.lon.values
if len(ds[code].shape) == 3: # surface level variables
ds[code] = ds[code].expand_dims("val", axis=1)
# remove the last 24 hours if this year has 366 days
np_vars[var] = ds[code].to_numpy()[:len_to_extract]
if len(np_vars[var]) < len_to_extract:
n_missing_data = len_to_extract - len(np_vars[var])
np_vars[var] = np.concatenate((np_vars[var], np_vars[var][-n_missing_data:]), axis=0)
var_mean_yearly = np_vars[var].mean(axis=(0, 2, 3))
var_std_yearly = np_vars[var].std(axis=(0, 2, 3))
if var not in normalize_mean:
normalize_mean[var] = [var_mean_yearly]
normalize_std[var] = [var_std_yearly]
else:
normalize_mean[var].append(var_mean_yearly)
normalize_std[var].append(var_std_yearly)
else: # multiple-level variables, only use a subset
assert len(ds[code].shape) == 4
all_levels = ds["plev"][:].to_numpy() / 100 # 92500 --> 925
all_levels = all_levels.astype(int)
all_levels = np.intersect1d(all_levels, DEFAULT_PRESSURE_LEVELS)
for level in all_levels:
ds_level = ds.sel(plev=[level * 100.0])
# level = int(level / 100) # 92500 --> 925
# remove the last 24 hours if this year has 366 days
np_vars[f"{var}_{level}"] = ds_level[code].to_numpy()[:len_to_extract]
if len(np_vars[f"{var}_{level}"]) < len_to_extract:
n_missing_data = len_to_extract - len(np_vars[f"{var}_{level}"])
np_vars[f"{var}_{level}"] = np.concatenate((np_vars[f"{var}_{level}"], np_vars[f"{var}_{level}"][-n_missing_data:]), axis=0)
var_mean_yearly = np_vars[f"{var}_{level}"].mean(axis=(0, 2, 3))
var_std_yearly = np_vars[f"{var}_{level}"].std(axis=(0, 2, 3))
if var not in normalize_mean:
normalize_mean[f"{var}_{level}"] = [var_mean_yearly]
normalize_std[f"{var}_{level}"] = [var_std_yearly]
else:
normalize_mean[f"{var}_{level}"].append(var_mean_yearly)
normalize_std[f"{var}_{level}"].append(var_std_yearly)
return np_vars, normalize_mean, normalize_std, lat, lon
def aggregate_mean_std(normalize_mean, normalize_std):
for var in normalize_mean.keys():
normalize_mean[var] = np.stack(normalize_mean[var], axis=0)
normalize_std[var] = np.stack(normalize_std[var], axis=0)
mean, std = normalize_mean[var], normalize_std[var]
# var(X) = E[var(X|Y)] + var(E[X|Y])
variance = (std**2).mean(axis=0) + (mean**2).mean(axis=0) - mean.mean(axis=0) ** 2
std = np.sqrt(variance)
# E[X] = E[E[X|Y]]
mean = mean.mean(axis=0)
normalize_mean[var] = mean
normalize_std[var] = std
return normalize_mean, normalize_std
def nc2np(dataset, path, variables, years, hours_per_year, num_shards_per_year, save_dir):
os.makedirs(os.path.join(save_dir, "train"), exist_ok=True)
normalize_mean = {}
normalize_std = {}
lat, lon = None, None
for year in tqdm(years):
np_vars = {}
len_to_extract = hours_per_year
        if year == '201001010600-201501010000' and (dataset == 'hammoz' or dataset == 'tai'): # special case: this chunk has only 7304 points, so truncate to 7300 to shard evenly
len_to_extract = 7300
else:
len_to_extract = hours_per_year
np_vars, normalize_mean, normalize_std, lat, lon = extract_one_year(
path,
year,
variables,
len_to_extract,
np_vars,
normalize_mean,
normalize_std
)
        # lat and lon are (re)assigned by extract_one_year on every iteration
num_shards = num_shards_per_year
if year == '201001010600-201501010000' and dataset == 'tai': # only 7300 points
num_shards = num_shards // 2
if year == '201001010600-201501010000' and dataset == 'hammoz':
num_shards = num_shards // 4
assert len_to_extract % num_shards == 0
num_hrs_per_shard = len_to_extract // num_shards
for shard_id in range(num_shards):
start_id = shard_id * num_hrs_per_shard
end_id = start_id + num_hrs_per_shard
sharded_data = {k: np_vars[k][start_id:end_id] for k in np_vars.keys()}
np.savez(
os.path.join(save_dir, "train", f"{year}_{shard_id}.npz"),
**sharded_data,
)
normalize_mean, normalize_std = aggregate_mean_std(normalize_mean, normalize_std)
np.savez(os.path.join(save_dir, "normalize_mean.npz"), **normalize_mean)
np.savez(os.path.join(save_dir, "normalize_std.npz"), **normalize_std)
np.save(os.path.join(save_dir, "lat.npy"), lat)
np.save(os.path.join(save_dir, "lon.npy"), lon)
@click.command()
@click.option("--dataset", type=str, default='mpi')
@click.option("--path", type=click.Path(exists=True))
@click.option("--num_shards", type=int, default=10) ## recommended: 10 shards for MPI, 20 for tai, 2 for awi, 40 for hammoz, 2 for cmcc (must keep the same ratio to be able to train on multi gpus)
@click.option("--save_dir", type=click.Path(exists=False))
def main(
dataset,
path,
num_shards,
save_dir
):
os.makedirs(save_dir, exist_ok=True)
if dataset == 'mpi':
hours_per_year = 7300
year_strings = [f"{y}01010600-{y+5}01010000" for y in range(1850, 2015, 5)]
variables = [
"2m_temperature",
"10m_u_component_of_wind",
"10m_v_component_of_wind",
"geopotential",
"specific_humidity",
"temperature",
"u_component_of_wind",
"v_component_of_wind",
]
elif dataset == 'tai':
hours_per_year = 14600
year_strings = [
'185001010000-186001010000',
'186001010600-187001010000',
'187001010600-188001010000',
'188001010600-189001010000',
'189001010600-190001010000',
'190001010600-191001010000',
'191001010600-192001010000',
'192001010600-193001010000',
'193001010600-194001010000',
'194001020000-195001010000',
'195001010600-196001010000',
'196001010600-197001010000',
'197001010600-198001010000',
'198001010600-199001010000',
'199001010600-200001010000',
'200001010600-201001010000',
'201001010600-201501010000'
]
variables = [
"2m_temperature",
"geopotential",
"specific_humidity",
"temperature",
"u_component_of_wind",
"v_component_of_wind",
]
elif dataset == 'awi':
hours_per_year = 1460
year_strings = [f'{y}01010600-{y+1}01010000' for y in range(1850, 2015, 1)]
variables = [
"2m_temperature",
"10m_u_component_of_wind",
"10m_v_component_of_wind",
"geopotential",
"specific_humidity",
"temperature",
"u_component_of_wind",
"v_component_of_wind",
]
elif dataset == 'hammoz':
hours_per_year = 29200
year_strings = [
'185001010600-187001010000',
'187001010600-189001010000',
'189001010600-191001010000',
'191001010600-193001010000',
'193001010600-195001010000',
'195001010600-197001010000',
'197001010600-199001010000',
'199001010600-201001010000',
'201001010600-201501010000'
]
variables = [
"2m_temperature",
"10m_u_component_of_wind",
"10m_v_component_of_wind",
"geopotential",
"specific_humidity",
"temperature",
"u_component_of_wind",
"v_component_of_wind",
]
elif dataset == 'cmcc':
hours_per_year = 1460
year_strings = [f'{y}01010600-{y+1}01010000' for y in range(1850, 2015, 1)]
variables = [
"geopotential",
"temperature",
"u_component_of_wind",
"v_component_of_wind",
]
else:
raise NotImplementedError(f'{dataset} is not supported')
assert hours_per_year % num_shards == 0
nc2np(
dataset=dataset,
path=path,
variables=variables,
years=year_strings,
hours_per_year=hours_per_year,
num_shards_per_year=num_shards,
save_dir=save_dir
)
if __name__ == "__main__":
main()
|
ClimaX/src/data_preprocessing/nc2np_equally_cmip6.py/0
|
{
"file_path": "ClimaX/src/data_preprocessing/nc2np_equally_cmip6.py",
"repo_id": "ClimaX",
"token_count": 4993
}
| 212 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
from models.networks.base_network import BaseNetwork
from models.networks.loss import *
from models.networks.discriminator import *
from models.networks.generator import *
from models.networks.ContextualLoss import *
from models.networks.correspondence import *
from models.networks.ops import *
import util.util as util
def find_network_using_name(target_network_name, filename, add=True):
target_class_name = target_network_name + filename if add else target_network_name
module_name = 'models.networks.' + filename
network = util.find_class_in_module(target_class_name, module_name)
assert issubclass(network, BaseNetwork), \
"Class %s should be a subclass of BaseNetwork" % network
return network
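# Hypothetical usage sketch: find_network_using_name('spade', 'generator')
# looks inside models.networks.generator for a class whose name matches
# 'spadegenerator' (assuming util.find_class_in_module compares lowercased
# names, as in the SPADE/CoCosNet codebases).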
def modify_commandline_options(parser, is_train):
opt, _ = parser.parse_known_args()
netG_cls = find_network_using_name(opt.netG, 'generator')
parser = netG_cls.modify_commandline_options(parser, is_train)
if is_train:
netD_cls = find_network_using_name(opt.netD, 'discriminator')
parser = netD_cls.modify_commandline_options(parser, is_train)
return parser
def create_network(cls, opt):
net = cls(opt)
net.print_network()
if len(opt.gpu_ids) > 0:
assert(torch.cuda.is_available())
net.cuda()
net.init_weights(opt.init_type, opt.init_variance)
return net
def define_G(opt):
netG_cls = find_network_using_name(opt.netG, 'generator')
return create_network(netG_cls, opt)
def define_D(opt):
netD_cls = find_network_using_name(opt.netD, 'discriminator')
return create_network(netD_cls, opt)
def define_Corr(opt):
netCoor_cls = find_network_using_name(opt.netCorr, 'correspondence')
return create_network(netCoor_cls, opt)
|
CoCosNet-v2/models/networks/__init__.py/0
|
{
"file_path": "CoCosNet-v2/models/networks/__init__.py",
"repo_id": "CoCosNet-v2",
"token_count": 679
}
| 213 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
import torch.nn.functional as F
import models.networks as networks
import util.util as util
class Pix2PixModel(torch.nn.Module):
@staticmethod
def modify_commandline_options(parser, is_train):
networks.modify_commandline_options(parser, is_train)
return parser
def __init__(self, opt):
super().__init__()
self.opt = opt
self.FloatTensor = torch.cuda.FloatTensor if self.use_gpu() \
else torch.FloatTensor
self.ByteTensor = torch.cuda.ByteTensor if self.use_gpu() \
else torch.ByteTensor
self.alpha = 1
self.net = torch.nn.ModuleDict(self.initialize_networks(opt))
# set loss functions
if opt.isTrain:
self.vggnet_fix = networks.correspondence.VGG19_feature_color_torchversion(vgg_normal_correct=opt.vgg_normal_correct)
self.vggnet_fix.load_state_dict(torch.load('models/vgg19_conv.pth'))
self.vggnet_fix.eval()
for param in self.vggnet_fix.parameters():
param.requires_grad = False
self.vggnet_fix.to(self.opt.gpu_ids[0])
self.contextual_forward_loss = networks.ContextualLoss_forward(opt)
self.criterionGAN = networks.GANLoss(
opt.gan_mode, tensor=self.FloatTensor, opt=self.opt)
self.criterionFeat = torch.nn.L1Loss()
self.MSE_loss = torch.nn.MSELoss()
if opt.which_perceptual == '5_2':
self.perceptual_layer = -1
elif opt.which_perceptual == '4_2':
self.perceptual_layer = -2
# Entry point for all calls involving forward pass
# of deep networks. We used this approach since DataParallel module
# can't parallelize custom functions, we branch to different
# routines based on |mode|.
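    # For example (a sketch of the calling convention implied below): trainer
    # code would call model(data, mode='generator') for G updates and
    # model(data, mode='discriminator', GforD=g_out) for D updates.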
def forward(self, data, mode, GforD=None, alpha=1):
input_label, input_semantics, real_image, self_ref, ref_image, ref_label, ref_semantics = self.preprocess_input(data, )
self.alpha = alpha
generated_out = {}
if mode == 'generator':
g_loss, generated_out = self.compute_generator_loss(input_label,
input_semantics, real_image, ref_label, ref_semantics, ref_image, self_ref)
out = {}
out['fake_image'] = generated_out['fake_image']
out['input_semantics'] = input_semantics
out['ref_semantics'] = ref_semantics
out['warp_out'] = None if 'warp_out' not in generated_out else generated_out['warp_out']
out['warp_mask'] = None if 'warp_mask' not in generated_out else generated_out['warp_mask']
out['adaptive_feature_seg'] = None if 'adaptive_feature_seg' not in generated_out else generated_out['adaptive_feature_seg']
out['adaptive_feature_img'] = None if 'adaptive_feature_img' not in generated_out else generated_out['adaptive_feature_img']
out['warp_cycle'] = None if 'warp_cycle' not in generated_out else generated_out['warp_cycle']
out['warp_i2r'] = None if 'warp_i2r' not in generated_out else generated_out['warp_i2r']
out['warp_i2r2i'] = None if 'warp_i2r2i' not in generated_out else generated_out['warp_i2r2i']
return g_loss, out
elif mode == 'discriminator':
d_loss = self.compute_discriminator_loss(
input_semantics, real_image, GforD, label=input_label)
return d_loss
elif mode == 'inference':
out = {}
with torch.no_grad():
out = self.inference(input_semantics,
ref_semantics=ref_semantics, ref_image=ref_image, self_ref=self_ref)
out['input_semantics'] = input_semantics
out['ref_semantics'] = ref_semantics
return out
else:
raise ValueError("|mode| is invalid")
def create_optimizers(self, opt):
G_params, D_params = list(), list()
G_params += [{'params': self.net['netG'].parameters(), 'lr': opt.lr*0.5}]
G_params += [{'params': self.net['netCorr'].parameters(), 'lr': opt.lr*0.5}]
if opt.isTrain:
D_params += list(self.net['netD'].parameters())
if opt.weight_domainC > 0 and opt.domain_rela:
D_params += list(self.net['netDomainClassifier'].parameters())
if opt.no_TTUR:
beta1, beta2 = opt.beta1, opt.beta2
G_lr, D_lr = opt.lr, opt.lr
else:
beta1, beta2 = 0, 0.9
G_lr, D_lr = opt.lr / 2, opt.lr * 2
optimizer_G = torch.optim.Adam(G_params, lr=G_lr, betas=(beta1, beta2), eps=1e-3)
optimizer_D = torch.optim.Adam(D_params, lr=D_lr, betas=(beta1, beta2))
return optimizer_G, optimizer_D
def save(self, epoch):
util.save_network(self.net['netG'], 'G', epoch, self.opt)
util.save_network(self.net['netD'], 'D', epoch, self.opt)
util.save_network(self.net['netCorr'], 'Corr', epoch, self.opt)
if self.opt.weight_domainC > 0 and self.opt.domain_rela:
util.save_network(self.net['netDomainClassifier'], 'DomainClassifier', epoch, self.opt)
############################################################################
# Private helper methods
############################################################################
def initialize_networks(self, opt):
net = {}
net['netG'] = networks.define_G(opt)
net['netD'] = networks.define_D(opt) if opt.isTrain else None
net['netCorr'] = networks.define_Corr(opt)
net['netDomainClassifier'] = networks.define_DomainClassifier(opt) if opt.weight_domainC > 0 and opt.domain_rela else None
if not opt.isTrain or opt.continue_train:
net['netG'] = util.load_network(net['netG'], 'G', opt.which_epoch, opt)
if opt.isTrain:
net['netD'] = util.load_network(net['netD'], 'D', opt.which_epoch, opt)
net['netCorr'] = util.load_network(net['netCorr'], 'Corr', opt.which_epoch, opt)
if opt.weight_domainC > 0 and opt.domain_rela:
net['netDomainClassifier'] = util.load_network(net['netDomainClassifier'], 'DomainClassifier', opt.which_epoch, opt)
if (not opt.isTrain) and opt.use_ema:
net['netG'] = util.load_network(net['netG'], 'G_ema', opt.which_epoch, opt)
net['netCorr'] = util.load_network(net['netCorr'], 'netCorr_ema', opt.which_epoch, opt)
return net
#return netG_stage1, netD_stage1, netG, netD, netE, netCorr
# preprocess the input, such as moving the tensors to GPUs and
# transforming the label map to one-hot encoding
# |data|: dictionary of the input data
def preprocess_input(self, data):
if self.opt.dataset_mode == 'celebahq':
glasses = data['label'][:,1::2,:,:].long()
data['label'] = data['label'][:,::2,:,:]
glasses_ref = data['label_ref'][:,1::2,:,:].long()
data['label_ref'] = data['label_ref'][:,::2,:,:]
if self.use_gpu():
glasses = glasses.cuda()
glasses_ref = glasses_ref.cuda()
elif self.opt.dataset_mode == 'celebahqedge':
input_semantics = data['label'].clone().cuda().float()
data['label'] = data['label'][:,:1,:,:]
ref_semantics = data['label_ref'].clone().cuda().float()
data['label_ref'] = data['label_ref'][:,:1,:,:]
elif self.opt.dataset_mode == 'deepfashion':
input_semantics = data['label'].clone().cuda().float()
data['label'] = data['label'][:,:3,:,:]
ref_semantics = data['label_ref'].clone().cuda().float()
data['label_ref'] = data['label_ref'][:,:3,:,:]
# move to GPU and change data types
if self.opt.dataset_mode != 'deepfashion':
data['label'] = data['label'].long()
if self.use_gpu():
data['label'] = data['label'].cuda()
data['image'] = data['image'].cuda()
data['ref'] = data['ref'].cuda()
data['label_ref'] = data['label_ref'].cuda()
if self.opt.dataset_mode != 'deepfashion':
data['label_ref'] = data['label_ref'].long()
data['self_ref'] = data['self_ref'].cuda()
# create one-hot label map
if self.opt.dataset_mode != 'celebahqedge' and self.opt.dataset_mode != 'deepfashion':
label_map = data['label']
bs, _, h, w = label_map.size()
nc = self.opt.label_nc + 1 if self.opt.contain_dontcare_label \
else self.opt.label_nc
input_label = self.FloatTensor(bs, nc, h, w).zero_()
input_semantics = input_label.scatter_(1, label_map, 1.0)
label_map = data['label_ref']
label_ref = self.FloatTensor(bs, nc, h, w).zero_()
ref_semantics = label_ref.scatter_(1, label_map, 1.0)
if self.opt.dataset_mode == 'celebahq':
assert input_semantics[:,-3:-2,:,:].sum().cpu().item() == 0
input_semantics[:,-3:-2,:,:] = glasses
assert ref_semantics[:,-3:-2,:,:].sum().cpu().item() == 0
ref_semantics[:,-3:-2,:,:] = glasses_ref
return data['label'], input_semantics, data['image'], data['self_ref'], data['ref'], data['label_ref'], ref_semantics
def get_ctx_loss(self, source, target):
contextual_style5_1 = torch.mean(self.contextual_forward_loss(source[-1], target[-1].detach())) * 8
contextual_style4_1 = torch.mean(self.contextual_forward_loss(source[-2], target[-2].detach())) * 4
contextual_style3_1 = torch.mean(self.contextual_forward_loss(F.avg_pool2d(source[-3], 2), F.avg_pool2d(target[-3].detach(), 2))) * 2
if self.opt.use_22ctx:
contextual_style2_1 = torch.mean(self.contextual_forward_loss(F.avg_pool2d(source[-4], 4), F.avg_pool2d(target[-4].detach(), 4))) * 1
return contextual_style5_1 + contextual_style4_1 + contextual_style3_1 + contextual_style2_1
return contextual_style5_1 + contextual_style4_1 + contextual_style3_1
def compute_generator_loss(self, input_label, input_semantics, real_image, ref_label=None, ref_semantics=None, ref_image=None, self_ref=None):
G_losses = {}
generate_out = self.generate_fake(
input_semantics, real_image, ref_semantics=ref_semantics, ref_image=ref_image, self_ref=self_ref)
if 'loss_novgg_featpair' in generate_out and generate_out['loss_novgg_featpair'] is not None:
G_losses['no_vgg_feat'] = generate_out['loss_novgg_featpair']
if self.opt.warp_cycle_w > 0:
if not self.opt.warp_patch:
ref = F.avg_pool2d(ref_image, self.opt.warp_stride)
else:
ref = ref_image
G_losses['G_warp_cycle'] = F.l1_loss(generate_out['warp_cycle'], ref) * self.opt.warp_cycle_w
if self.opt.two_cycle:
real = F.avg_pool2d(real_image, self.opt.warp_stride)
G_losses['G_warp_cycle'] += F.l1_loss(generate_out['warp_i2r2i'], real) * self.opt.warp_cycle_w
if self.opt.warp_self_w > 0:
# real = F.avg_pool2d(real_image, self.opt.warp_stride)
# warp = F.avg_pool2d(generate_out['warp_out'], self.opt.warp_stride)
sample_weights = (self_ref[:, 0, 0, 0] / (sum(self_ref[:, 0, 0, 0]) + 1e-5)).unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)
            G_losses['G_warp_self'] = torch.mean(F.l1_loss(generate_out['warp_out'], real_image, reduction='none') * sample_weights) * self.opt.warp_self_w
pred_fake, pred_real, seg, fake_cam_logit, real_cam_logit = self.discriminate(
input_semantics, generate_out['fake_image'], real_image)
G_losses['GAN'] = self.criterionGAN(pred_fake, True,
for_discriminator=False) * self.opt.weight_gan
if not self.opt.no_ganFeat_loss:
num_D = len(pred_fake)
GAN_Feat_loss = self.FloatTensor(1).fill_(0)
for i in range(num_D): # for each discriminator
# last output is the final prediction, so we exclude it
num_intermediate_outputs = len(pred_fake[i]) - 1
for j in range(num_intermediate_outputs): # for each layer output
unweighted_loss = self.criterionFeat(
pred_fake[i][j], pred_real[i][j].detach())
GAN_Feat_loss += unweighted_loss * self.opt.lambda_feat / num_D
G_losses['GAN_Feat'] = GAN_Feat_loss
fake_features = self.vggnet_fix(generate_out['fake_image'], ['r12', 'r22', 'r32', 'r42', 'r52'], preprocess=True)
sample_weights = (self_ref[:, 0, 0, 0] / (sum(self_ref[:, 0, 0, 0]) + 1e-5)).unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)
weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0]
loss = 0
for i in range(len(generate_out['real_features'])):
loss += weights[i] * util.weighted_l1_loss(fake_features[i], generate_out['real_features'][i].detach(), sample_weights)
G_losses['fm'] = loss * self.opt.lambda_vgg * self.opt.fm_ratio
feat_loss = util.mse_loss(fake_features[self.perceptual_layer], generate_out['real_features'][self.perceptual_layer].detach())
G_losses['perc'] = feat_loss * self.opt.weight_perceptual
G_losses['contextual'] = self.get_ctx_loss(fake_features, generate_out['ref_features']) * self.opt.lambda_vgg * self.opt.ctx_w
if self.opt.warp_mask_losstype != 'none':
ref_label = F.interpolate(ref_label.float(), scale_factor=0.25, mode='nearest').long().squeeze(1)
gt_label = F.interpolate(input_label.float(), scale_factor=0.25, mode='nearest').long().squeeze(1)
weights = []
for i in range(ref_label.shape[0]):
ref_label_uniq = torch.unique(ref_label[i])
gt_label_uniq = torch.unique(gt_label[i])
zero_label = [it for it in gt_label_uniq if it not in ref_label_uniq]
weight = torch.ones_like(gt_label[i]).float()
for j in zero_label:
weight[gt_label[i] == j] = 0
weight[gt_label[i] == 0] = 0 #no loss from unknown class
weights.append(weight.unsqueeze(0))
weights = torch.cat(weights, dim=0)
#G_losses['mask'] = (F.cross_entropy(warp_mask, gt_label, reduce =False) * weights.float()).sum() / (weights.sum() + 1e-5) * self.opt.weight_mask
            G_losses['mask'] = (F.nll_loss(torch.log(generate_out['warp_mask'] + 1e-10), gt_label, reduction='none') * weights).sum() / (weights.sum() + 1e-5) * self.opt.weight_mask
#self.fake_image = fake_image
return G_losses, generate_out
def compute_discriminator_loss(self, input_semantics, real_image, GforD, label=None):
D_losses = {}
with torch.no_grad():
#fake_image, _, _, _, _ = self.generate_fake(input_semantics, real_image, VGG_feat=False)
fake_image = GforD['fake_image'].detach()
fake_image.requires_grad_()
pred_fake, pred_real, seg, fake_cam_logit, real_cam_logit = self.discriminate(
input_semantics, fake_image, real_image)
D_losses['D_Fake'] = self.criterionGAN(pred_fake, False,
for_discriminator=True) * self.opt.weight_gan
D_losses['D_real'] = self.criterionGAN(pred_real, True,
for_discriminator=True) * self.opt.weight_gan
return D_losses
def encode_z(self, real_image):
mu, logvar = self.net['netE'](real_image)
z = self.reparameterize(mu, logvar)
return z, mu, logvar
def generate_fake(self, input_semantics, real_image, ref_semantics=None, ref_image=None, self_ref=None):
generate_out = {}
#print(ref_image.max())
ref_relu1_1, ref_relu2_1, ref_relu3_1, ref_relu4_1, ref_relu5_1 = self.vggnet_fix(ref_image, ['r12', 'r22', 'r32', 'r42', 'r52'], preprocess=True)
coor_out = self.net['netCorr'](ref_image, real_image, input_semantics, ref_semantics, alpha=self.alpha)
generate_out['ref_features'] = [ref_relu1_1, ref_relu2_1, ref_relu3_1, ref_relu4_1, ref_relu5_1]
generate_out['real_features'] = self.vggnet_fix(real_image, ['r12', 'r22', 'r32', 'r42', 'r52'], preprocess=True)
if self.opt.CBN_intype == 'mask':
CBN_in = input_semantics
elif self.opt.CBN_intype == 'warp':
CBN_in = coor_out['warp_out']
elif self.opt.CBN_intype == 'warp_mask':
CBN_in = torch.cat((coor_out['warp_out'], input_semantics), dim=1)
generate_out['fake_image'] = self.net['netG'](input_semantics, warp_out=CBN_in)
generate_out = {**generate_out, **coor_out}
return generate_out
def inference(self, input_semantics, ref_semantics=None, ref_image=None, self_ref=None):
generate_out = {}
coor_out = self.net['netCorr'](ref_image, None, input_semantics, ref_semantics, alpha=self.alpha)
if self.opt.CBN_intype == 'mask':
CBN_in = input_semantics
elif self.opt.CBN_intype == 'warp':
CBN_in = coor_out['warp_out']
elif self.opt.CBN_intype == 'warp_mask':
CBN_in = torch.cat((coor_out['warp_out'], input_semantics), dim=1)
generate_out['fake_image'] = self.net['netG'](input_semantics, warp_out=CBN_in)
generate_out = {**generate_out, **coor_out}
return generate_out
# Given fake and real image, return the prediction of discriminator
# for each fake and real image.
def discriminate(self, input_semantics, fake_image, real_image):
fake_concat = torch.cat([input_semantics, fake_image], dim=1)
real_concat = torch.cat([input_semantics, real_image], dim=1)
# In Batch Normalization, the fake and real images are
# recommended to be in the same batch to avoid disparate
# statistics in fake and real images.
# So both fake and real images are fed to D all at once.
fake_and_real = torch.cat([fake_concat, real_concat], dim=0)
seg = None
discriminator_out, seg, cam_logit = self.net['netD'](fake_and_real)
pred_fake, pred_real = self.divide_pred(discriminator_out)
fake_cam_logit, real_cam_logit = None, None
if self.opt.D_cam > 0:
fake_cam_logit = torch.cat([it[:it.shape[0]//2] for it in cam_logit], dim=1)
real_cam_logit = torch.cat([it[it.shape[0]//2:] for it in cam_logit], dim=1)
#fake_cam_logit, real_cam_logit = self.divide_pred(cam_logit)
return pred_fake, pred_real, seg, fake_cam_logit, real_cam_logit
# Take the prediction of fake and real images from the combined batch
def divide_pred(self, pred):
# the prediction contains the intermediate outputs of multiscale GAN,
# so it's usually a list
if type(pred) == list:
fake = []
real = []
for p in pred:
fake.append([tensor[:tensor.size(0) // 2] for tensor in p])
real.append([tensor[tensor.size(0) // 2:] for tensor in p])
else:
fake = pred[:pred.size(0) // 2]
real = pred[pred.size(0) // 2:]
return fake, real
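    # e.g. with batch size B and one discriminator scale, each tensor in pred
    # has shape (2B, C, H, W) because discriminate() stacks fake then real
    # along dim 0; divide_pred hands back the first B rows as fake and the
    # last B as real.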
def get_edges(self, t):
edge = self.ByteTensor(t.size()).zero_()
edge[:, :, :, 1:] = edge[:, :, :, 1:] | (t[:, :, :, 1:] != t[:, :, :, :-1])
edge[:, :, :, :-1] = edge[:, :, :, :-1] | (t[:, :, :, 1:] != t[:, :, :, :-1])
edge[:, :, 1:, :] = edge[:, :, 1:, :] | (t[:, :, 1:, :] != t[:, :, :-1, :])
edge[:, :, :-1, :] = edge[:, :, :-1, :] | (t[:, :, 1:, :] != t[:, :, :-1, :])
return edge.float()
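    # Reparameterization trick: z = mu + sigma * eps with sigma = exp(0.5 * logvar),
    # so gradients flow through (mu, logvar) while eps ~ N(0, I) supplies the noise.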
def reparameterize(self, mu, logvar):
std = torch.exp(0.5 * logvar)
eps = torch.randn_like(std)
return eps.mul(std) + mu
def use_gpu(self):
return len(self.opt.gpu_ids) > 0
def compute_D_seg_loss(self, out, gt):
fake_seg, real_seg = self.divide_pred([out])
fake_seg_loss = F.cross_entropy(fake_seg[0][0], gt)
real_seg_loss = F.cross_entropy(real_seg[0][0], gt)
down_gt = F.interpolate(gt.unsqueeze(1).float(), scale_factor=0.5, mode='nearest').squeeze().long()
fake_seg_loss_down = F.cross_entropy(fake_seg[0][1], down_gt)
real_seg_loss_down = F.cross_entropy(real_seg[0][1], down_gt)
seg_loss = fake_seg_loss + real_seg_loss + fake_seg_loss_down + real_seg_loss_down
return seg_loss
|
CoCosNet/models/pix2pix_model.py/0
|
{
"file_path": "CoCosNet/models/pix2pix_model.py",
"repo_id": "CoCosNet",
"token_count": 9946
}
| 214 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .utils import (remove_comments_and_docstrings,
tree_to_token_index,
index_to_code_token,
tree_to_variable_index)
from .DFG import DFG_python,DFG_java,DFG_ruby,DFG_go,DFG_php,DFG_javascript,DFG_csharp
|
CodeBERT/CodeReviewer/code/evaluator/CodeBLEU/parser/__init__.py/0
|
{
"file_path": "CodeBERT/CodeReviewer/code/evaluator/CodeBLEU/parser/__init__.py",
"repo_id": "CodeBERT",
"token_count": 155
}
| 215 |
import os
import torch
import logging
import argparse
import random
import numpy as np
from tqdm import tqdm
import multiprocessing
import time
from itertools import cycle
from torch.utils.data import DataLoader, SequentialSampler, RandomSampler
from torch.utils.data.distributed import DistributedSampler
from transformers import AdamW, get_linear_schedule_with_warmup
from models import build_or_load_gen_model
from configs import add_args, set_seed, set_dist
from torch.nn.parallel import DistributedDataParallel as DDP
import torch.distributed as dist
from utils import CommentClsDataset, SimpleClsDataset
from sklearn.metrics import classification_report
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO,
)
logger = logging.getLogger(__name__)
def get_loader(data_file, args, tokenizer, pool):
def fn(features):
return features
logger.info(f"Start data file {data_file}.")
if args.raw_input:
dataset = SimpleClsDataset(tokenizer, pool, args, data_file)
else:
dataset = CommentClsDataset(tokenizer, pool, args, data_file)
sampler = RandomSampler(dataset)
dataloader = DataLoader(dataset, sampler=sampler, batch_size=args.eval_batch_size, num_workers=args.cpu_count, collate_fn=fn)
logger.info(f"Finish data files {data_file}.")
return dataset, sampler, dataloader
def eval_epoch_acc(args, eval_dataloader, model, tokenizer):
# Start evaluating model
logger.info(" " + "***** Running acc evaluation *****")
logger.info(" Batch size = %d", args.eval_batch_size)
model.eval()
local_rank = 0
pred, gold = [], []
with torch.no_grad():
for step, examples in enumerate(tqdm(eval_dataloader), 1):
if step == 1:
ex = examples[0]
logger.info(f"batch size: {len(examples)}")
logger.info(f"example source: {tokenizer.convert_ids_to_tokens(ex.source_ids)}")
logger.info(f"example target: {ex.y}")
source_ids = torch.tensor(
[ex.source_ids for ex in examples], dtype=torch.long
).to(local_rank)
source_mask = source_ids.ne(tokenizer.pad_id)
logits = model(
cls=True,
input_ids=source_ids,
labels=None,
attention_mask=source_mask
)
prediction = torch.argmax(logits, dim=-1).cpu().numpy()
pred.extend(prediction)
gold.extend([ex.y for ex in examples])
logger.info("\n" + classification_report(gold, pred, digits=4))
logger.info(f"Target positive percentage: {sum(gold) / len(gold)}")
return
def main(args):
dist.init_process_group(backend="nccl")
local_rank = dist.get_rank() % args.gpu_per_node
args.global_rank = local_rank + args.node_index * args.gpu_per_node
args.local_rank = local_rank
args.world_size = dist.get_world_size()
logger.warning("Process rank: %s, global rank: %s, world size: %s, bs: %s",
args.local_rank, args.global_rank, \
torch.distributed.get_world_size(), \
args.eval_batch_size)
torch.cuda.set_device(local_rank)
set_seed(args)
config, model, tokenizer = build_or_load_gen_model(args)
model = DDP(model.cuda(), device_ids=[local_rank], output_device=local_rank, find_unused_parameters=True)
pool = multiprocessing.Pool(args.cpu_count)
data_file = args.eval_file
set_seed(args)
    _, _, dataloader = get_loader(data_file, args, tokenizer, pool)  # WARNING: this is an iterator, to save memory
model.eval()
eval_epoch_acc(args, dataloader, model, tokenizer)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
args = add_args(parser)
args.cpu_count = multiprocessing.cpu_count()
# remove long tokenization warning. ref: https://github.com/huggingface/transformers/issues/991
logging.getLogger("transformers.tokenization_utils_base").setLevel(logging.ERROR)
logger.info(args)
main(args)
logger.info("Test finished.")
# torch.multiprocessing.spawn(main, args=(args,), nprocs=torch.cuda.device_count())
|
CodeBERT/CodeReviewer/code/run_test_cls.py/0
|
{
"file_path": "CodeBERT/CodeReviewer/code/run_test_cls.py",
"repo_id": "CodeBERT",
"token_count": 1766
}
| 216 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import logging
import sys
from sklearn.metrics import recall_score,precision_score,f1_score
def read_answers(filename):
answers={}
with open(filename) as f:
for line in f:
line=line.strip()
idx1,idx2,label=line.split()
answers[(idx1,idx2)]=int(label)
return answers
def read_predictions(filename):
predictions={}
with open(filename) as f:
for line in f:
line=line.strip()
idx1,idx2,label=line.split()
predictions[(idx1,idx2)]=int(label)
return predictions
def calculate_scores(answers,predictions):
y_trues,y_preds=[],[]
for key in answers:
if key not in predictions:
logging.error("Missing prediction for ({},{}) pair.".format(key[0],key[1]))
sys.exit()
y_trues.append(answers[key])
y_preds.append(predictions[key])
scores={}
scores['Recall']=recall_score(y_trues, y_preds)
    scores['Precision']=precision_score(y_trues, y_preds)
scores['F1']=f1_score(y_trues, y_preds)
return scores
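# Both input files are expected to hold whitespace-separated lines of the
# form "<idx1> <idx2> <label>", e.g. "13 42 1" marks the pair (13, 42) as a
# clone (ids here are illustrative).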
def main():
import argparse
parser = argparse.ArgumentParser(description='Evaluate leaderboard predictions for BigCloneBench dataset.')
parser.add_argument('--answers', '-a',help="filename of the labels, in txt format.")
parser.add_argument('--predictions', '-p',help="filename of the leaderboard predictions, in txt format.")
args = parser.parse_args()
answers=read_answers(args.answers)
predictions=read_predictions(args.predictions)
scores=calculate_scores(answers,predictions)
print(scores)
if __name__ == '__main__':
main()
|
CodeBERT/GraphCodeBERT/clonedetection/evaluator/evaluator.py/0
|
{
"file_path": "CodeBERT/GraphCodeBERT/clonedetection/evaluator/evaluator.py",
"repo_id": "CodeBERT",
"token_count": 729
}
| 217 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
import torch.nn as nn
import torch
from torch.autograd import Variable
import copy
import torch.nn.functional as F
from torch.nn import CrossEntropyLoss, MSELoss
class Model(nn.Module):
def __init__(self, encoder,config,tokenizer,args):
super(Model, self).__init__()
self.encoder = encoder
self.config=config
self.tokenizer=tokenizer
self.args=args
def forward(self, input_ids=None,p_input_ids=None,n_input_ids=None,labels=None):
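        # Contrastive setup (descriptive note): each anchor (input_ids) is
        # paired with a positive (p_input_ids) and a hard negative
        # (n_input_ids). Mean-pooled, L2-normalized encodings give cosine
        # similarities; the *20 factor acts as an inverse temperature
        # (tau = 0.05), and in-batch examples sharing the anchor's label are
        # masked out of the extra negative pool (prob_3).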
bs,_ = input_ids.size()
input_ids = torch.cat((input_ids,p_input_ids,n_input_ids),0)
outputs = self.encoder(input_ids,attention_mask=input_ids.ne(1))[0]
outputs = (outputs * input_ids.ne(1)[:,:,None]).sum(1)/input_ids.ne(1).sum(1)[:,None]
outputs = torch.nn.functional.normalize(outputs, p=2, dim=1)
outputs = outputs.split(bs,0)
prob_1 = (outputs[0]*outputs[1]).sum(-1)*20
prob_2 = (outputs[0]*outputs[2]).sum(-1)*20
temp = torch.cat((outputs[0],outputs[1]),0)
temp_labels = torch.cat((labels,labels),0)
prob_3 = torch.mm(outputs[0],temp.t())*20
mask = labels[:,None]==temp_labels[None,:]
prob_3 = prob_3*(1-mask.float())-1e9*mask.float()
prob = torch.softmax(torch.cat((prob_1[:,None],prob_2[:,None],prob_3),-1),-1)
loss = torch.log(prob[:,0]+1e-10)
loss = -loss.mean()
return loss,outputs[0]
|
CodeBERT/UniXcoder/downstream-tasks/clone-detection/POJ-104/model.py/0
|
{
"file_path": "CodeBERT/UniXcoder/downstream-tasks/clone-detection/POJ-104/model.py",
"repo_id": "CodeBERT",
"token_count": 751
}
| 218 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from concurrent.futures import as_completed, ProcessPoolExecutor
import numpy as np
import scipy
import tqdm
import os
import copy
import functools
from utils import Tools, FilePathBuilder, CONSTANTS
class SimilarityScore:
@staticmethod
def cosine_similarity(embedding_vec1, embedding_vec2):
return 1 - scipy.spatial.distance.cosine(embedding_vec1, embedding_vec2)
@staticmethod
def jaccard_similarity(list1, list2):
set1 = set(list1)
set2 = set(list2)
intersection = len(set1.intersection(set2))
union = len(set1.union(set2))
return float(intersection) / union
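    # e.g. jaccard_similarity(['a', 'b', 'b'], ['b', 'c']) == 1/3:
    # sets {'a', 'b'} and {'b', 'c'} share one element out of three in the union.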
class CodeSearchWorker:
def __init__(self, repo_embedding_lines, query_embedding_lines, output_path, sim_scorer, max_top_k, log_message):
self.repo_embedding_lines = repo_embedding_lines # list
self.query_embedding_lines = query_embedding_lines # list
self.max_top_k = max_top_k
self.sim_scorer = sim_scorer
self.output_path = output_path
self.log_message = log_message
def _is_context_after_hole(self, repo_embedding_line, query_line):
hole_fpath_tuple = tuple(query_line['metadata']['fpath_tuple'])
context_is_not_after_hole = []
for metadata in repo_embedding_line['metadata']:
if tuple(metadata['fpath_tuple']) != hole_fpath_tuple:
context_is_not_after_hole.append(True)
continue
# now we know that the repo line is in the same file as the hole
if metadata['end_line_no'] <= query_line['metadata']['context_start_lineno']:
context_is_not_after_hole.append(True)
continue
context_is_not_after_hole.append(False)
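        # the window counts as "after the hole" only when every chunk above
        # failed both checks, i.e. the whole window sits in the hole's file
        # past the context start and would leak future context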
return not any(context_is_not_after_hole)
def _find_top_k_context(self, query_line):
top_k_context = []
query_embedding = np.array(query_line['data'][0]['embedding'])
for repo_embedding_line in self.repo_embedding_lines:
if self._is_context_after_hole(repo_embedding_line, query_line):
continue
repo_line_embedding = np.array(repo_embedding_line['data'][0]['embedding'])
similarity_score = self.sim_scorer(query_embedding, repo_line_embedding)
top_k_context.append((repo_embedding_line, similarity_score))
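        # ascending sort plus tail slice below keep the max_top_k most similar
        # windows (result stays ordered from lowest to highest similarity)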
top_k_context = sorted(top_k_context, key=lambda x: x[1], reverse=False)[-self.max_top_k:]
return top_k_context
def run(self):
query_lines_with_retrieved_results = []
for query_line in self.query_embedding_lines:
new_line = copy.deepcopy(query_line)
top_k_context = self._find_top_k_context(new_line)
new_line['top_k_context'] = top_k_context
query_lines_with_retrieved_results.append(new_line)
Tools.dump_pickle(query_lines_with_retrieved_results, self.output_path)
class CodeSearchWrapper:
def __init__(self, vectorizer, benchmark, repos, window_sizes, slice_sizes):
self.vectorizer = vectorizer
if vectorizer == 'one-gram':
self.sim_scorer = SimilarityScore.jaccard_similarity
self.vector_path_builder = FilePathBuilder.one_gram_vector_path
elif vectorizer == 'ada002':
self.sim_scorer = SimilarityScore.cosine_similarity
self.vector_path_builder = FilePathBuilder.ada002_vector_path
        self.max_top_k = 20  # keep the top-20 contexts; prompt construction later uses the top 10
self.repos = repos
self.window_sizes = window_sizes
self.slice_sizes = slice_sizes
self.benchmark = benchmark
def _run_parallel(self, query_window_path_builder, prediction_path_template=None):
workers = []
for window_size in self.window_sizes:
for slice_size in self.slice_sizes:
for repo in self.repos:
if prediction_path_template:
query_window_path = query_window_path_builder(
prediction_path_template.format(window_size=window_size, slice_size=slice_size),
repo, window_size
)
else:
query_window_path = query_window_path_builder(repo, window_size)
query_line_path = self.vector_path_builder(query_window_path)
repo_window_path = FilePathBuilder.repo_windows_path(repo, window_size, slice_size)
repo_embedding_path = self.vector_path_builder(repo_window_path)
output_path = FilePathBuilder.retrieval_results_path(query_line_path, repo_embedding_path, self.max_top_k)
repo_embedding_lines = Tools.load_pickle(repo_embedding_path)
query_embedding_lines = Tools.load_pickle(query_line_path)
log_message = f'repo: {repo}, window: {window_size}, slice: {slice_size} {self.vectorizer}, max_top_k: {self.max_top_k}'
worker = CodeSearchWorker(repo_embedding_lines, query_embedding_lines, output_path, self.sim_scorer, self.max_top_k, log_message)
workers.append(worker)
# process pool
with ProcessPoolExecutor(max_workers=os.cpu_count()) as executor:
            futures = {executor.submit(worker.run) for worker in workers}
for future in tqdm.tqdm(as_completed(futures), total=len(futures)):
future.result()
def search_baseline_and_ground(self):
query_line_path_temp = functools.partial(FilePathBuilder.search_first_window_path, self.benchmark, CONSTANTS.rg)
self._run_parallel(query_line_path_temp)
query_line_path_temp = functools.partial(FilePathBuilder.search_first_window_path, self.benchmark, CONSTANTS.gt)
self._run_parallel(query_line_path_temp)
def search_prediction(self, mode, prediction_path_template):
query_line_path_temp = functools.partial(FilePathBuilder.gen_first_window_path, self.benchmark, mode)
self._run_parallel(query_line_path_temp, prediction_path_template)
|
CodeT/RepoCoder/search_code.py/0
|
{
"file_path": "CodeT/RepoCoder/search_code.py",
"repo_id": "CodeT",
"token_count": 2776
}
| 219 |
#!/usr/bin/env bash
uninstall()
{
# Path to Codex CLI source
local CODEX_CLI_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
# Path to OpenAI API settings
local OPENAI_RC_FILE="$CODEX_CLI_PATH/src/openaiapirc"
# Path to Bash settings loaded when a Bash session starts
local BASH_RC_FILE="$HOME/.codexclirc"
# Remove the plugin loaded by .bashrc
    rm -f "$BASH_RC_FILE"
# Remove credentials and other personal settings
    rm -f "$OPENAI_RC_FILE"
# Remove key binding (works only for sourced script calls)
    if [ "$SOURCED" -eq 1 ]; then
bind -r "\C-g"
fi
echo "Codex CLI has been removed."
}
# Detect if the script is sourced
(return 0 2>/dev/null) && SOURCED=1 || SOURCED=0
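# ("return" succeeds only in a sourced script; direct execution fails the
# return and the || branch sets SOURCED=0)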
uninstall
unset SOURCED
|
Codex-CLI/scripts/bash_cleanup.sh/0
|
{
"file_path": "Codex-CLI/scripts/bash_cleanup.sh",
"repo_id": "Codex-CLI",
"token_count": 306
}
| 220 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: test_large_face_list_face.py
Description: Unittests for Large Face List Face section of the Cognitive Face
API.
"""
import unittest
import cognitive_face as CF
from . import util
class TestFaceList(unittest.TestCase):
"""Unittests for Large Face List Face section."""
def test_face(self):
"""Unittests for `large_face_list_face.add`,
`large_face_list_face.update` and `large_face_list_face.delete`."""
image = '{}PersonGroup/Family1-Dad/Family1-Dad3.jpg'.format(
util.BASE_URL_IMAGE)
res = CF.large_face_list_face.add(image,
util.DataStore.large_face_list_id)
print(res)
self.assertIsInstance(res, dict)
util.wait()
persisted_face_id = res['persistedFaceId']
res = CF.large_face_list_face.update(util.DataStore.large_face_list_id,
persisted_face_id, "TempUserData")
print(res)
self.assertIsInstance(res, dict)
util.wait()
res = CF.large_face_list_face.delete(util.DataStore.large_face_list_id,
persisted_face_id)
print(res)
self.assertIsInstance(res, dict)
util.wait()
def test_get(self):
"""Unittests for `large_face_list_face.get`."""
res = CF.large_face_list_face.get(
util.DataStore.large_face_list_id,
util.DataStore.large_face_list_face_id['Dad'][0])
print(res)
self.assertIsInstance(res, dict)
util.wait()
def test_list(self):
"""Unittest for `large_face_list_face.list`."""
res = CF.large_face_list_face.list(util.DataStore.large_face_list_id)
print(res)
self.assertIsInstance(res, list)
util.wait()
|
Cognitive-Face-Python/cognitive_face/tests/test_large_face_list_face.py/0
|
{
"file_path": "Cognitive-Face-Python/cognitive_face/tests/test_large_face_list_face.py",
"repo_id": "Cognitive-Face-Python",
"token_count": 909
}
| 221 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: panel_find_similar.py
Description: Find Similar Panel for Python SDK sample.
"""
import os
import uuid
import wx
import wx.lib.scrolledpanel as scrolled
import util
import model
from view import base
class FindSimilarPanel(base.MyPanel):
"""FindSimilar Panel."""
def __init__(self, parent):
super(FindSimilarPanel, self).__init__(parent)
self.large_face_list_id = str(uuid.uuid1())
self.face_paths = []
self.detected_face_paths = []
self.faces = {}
self.persisted_faces = {}
self.vsizer = wx.BoxSizer(wx.VERTICAL)
self.panel = scrolled.ScrolledPanel(self)
self.hsizer = wx.BoxSizer()
self.hsizer.AddStretchSpacer()
self.hvsizer = wx.BoxSizer(wx.VERTICAL)
self.hvsizer.SetMinSize((util.INNER_PANEL_WIDTH, -1))
label = ('Find faces that are similar to a given face (the query '
'face).\nClick "Load Candidate Faces" to select a folder '
'containing images of the faces you want to compare to the '
'query face.\nNext, click "Open Query Face" to select the '
'query face image.\nScroll down to see the results '
'displayed under the query face.\n')
self.static_text = wx.StaticText(self.panel, label=label)
self.static_text.Wrap(util.INNER_PANEL_WIDTH)
self.hvsizer.Add(self.static_text, 0, wx.ALL, 0)
self.vhsizer = wx.BoxSizer()
self.lsizer = wx.BoxSizer(wx.VERTICAL)
self.lsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
flag = wx.EXPAND | wx.ALIGN_CENTER | wx.ALL
self.btn_folder = wx.Button(self.panel, label='Load Candidate Faces')
self.lsizer.Add(self.btn_folder, 0, flag, 5)
self.Bind(wx.EVT_BUTTON, self.OnChooseFolder, self.btn_folder)
flag = wx.ALIGN_CENTER | wx.ALL
self.grid = base.MyGridStaticBitmap(self.panel, 0, 4, 0, 0)
self.lsizer.Add(self.grid, 0, flag, 5)
self.vhsizer.Add(self.lsizer, 1, wx.EXPAND)
self.vhsizer.AddSpacer(90)
self.rsizer = wx.BoxSizer(wx.VERTICAL)
self.rsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
flag = wx.EXPAND | wx.ALIGN_CENTER | wx.ALL
self.btn_file = wx.Button(self.panel, label='Open Query Face')
self.rsizer.Add(self.btn_file, 0, flag, 5)
self.Bind(wx.EVT_BUTTON, self.OnChooseImage, self.btn_file)
flag = wx.ALIGN_CENTER | wx.ALL
self.bitmap = base.MyStaticBitmap(self.panel)
self.rsizer.Add(self.bitmap, 0, flag, 5)
self.result = base.FindSimilarsResult(self.panel)
self.rsizer.Add(self.result, 0, wx.EXPAND)
self.vhsizer.Add(self.rsizer, 1, wx.EXPAND)
self.hvsizer.Add(self.vhsizer)
self.hsizer.Add(self.hvsizer)
self.hsizer.AddStretchSpacer()
self.hsizer.Layout()
self.panel.SetSizer(self.hsizer)
self.panel.Layout()
self.panel.SetupScrolling(scroll_x=False)
self.vsizer.Add(self.panel, 3, wx.EXPAND)
self.log = base.MyLog(self)
self.vsizer.Add(self.log, 1, wx.EXPAND)
self.SetSizerAndFit(self.vsizer)
self.btn_file.Disable()
def OnChooseFolder(self, evt):
"""Choose Folder."""
large_face_list_exists = False
try:
self.log.log((
'Request: List {} will be used to build a person database. '
'Checking whether the list exists.').format(
self.large_face_list_id))
util.CF.large_face_list.get(self.large_face_list_id)
large_face_list_exists = True
self.log.log(
'Response: List {} exists.'.format(
self.large_face_list_id))
except util.CF.CognitiveFaceException as exp:
if exp.code != 'LargeFaceListNotFound':
self.log.log('Response: {}. {}'.format(exp.code, exp.msg))
return
else:
self.log.log(
'Response: List {} did not exist previously.'
.format(self.large_face_list_id))
if large_face_list_exists:
            text = (
                'Requires a clean up for list "{0}" before setting up a new '
                'list. Click Yes to proceed; list "{0}" will be cleared.'
            ).format(self.large_face_list_id)
title = 'Warning'
style = wx.YES_NO | wx.ICON_WARNING
result = wx.MessageBox(text, title, style)
if result == wx.YES:
util.CF.large_face_list.delete(self.large_face_list_id)
self.large_face_list_id = str(uuid.uuid1())
else:
return
dlg = wx.DirDialog(self)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
del self.face_paths[:]
for root, dirs, files in os.walk(path):
if files:
self.face_paths.extend(
[os.path.join(root, filename) for filename in files])
self.panel.SetupScrolling(scroll_x=False)
self.log.log('Request: Preparing, detecting faces in chosen folder.')
self.btn_folder.Disable()
self.btn_file.Disable()
self.persisted_faces.clear()
del self.detected_face_paths[:]
util.CF.large_face_list.create(self.large_face_list_id)
for path in self.face_paths:
try:
res = util.CF.large_face_list_face.add(
path, self.large_face_list_id)
self.log.log(
'Response: Success. Add with Persisted Face Id {}'.format(
res['persistedFaceId']))
except util.CF.CognitiveFaceException as exp:
self.log.log((
'[Error] Add "{}" to List {}: Code: {}, '
'Message: {}').format(
path, self.large_face_list_id, exp.code, exp.msg))
continue
self.detected_face_paths.append(path)
face = model.Face(res, path)
self.persisted_faces[face.persisted_id] = face
self.log.log('Response: Success. Total {0} faces are detected.'.format(
len(self.persisted_faces)))
self.log.log(
'Request: Training List: "{0}"'.format(self.large_face_list_id))
res = util.CF.large_face_list.train(self.large_face_list_id)
self.grid.set_paths(self.detected_face_paths)
self.btn_folder.Enable()
self.btn_file.Enable()
def OnChooseImage(self, evt):
"""Choose Image."""
util.CF.util.wait_for_large_face_list_training(self.large_face_list_id)
        self.log.log(
            'Response: Success. Training for list "{0}" has succeeded.'.format(
                self.large_face_list_id))
dlg = wx.FileDialog(self, wildcard=util.IMAGE_WILDCARD)
if dlg.ShowModal() != wx.ID_OK:
return
path = dlg.GetPath()
self.bitmap.set_path(path)
self.log.log('Detecting faces in {}'.format(path))
self.faces.clear()
res = util.CF.face.detect(path)
for entry in res:
face = model.Face(entry, path)
self.faces[face.id] = face
util.draw_bitmap_rectangle(self.bitmap, self.faces.values())
self.log.log(
'Success. Detected {} face(s) in {}'.format(len(self.faces), path))
res_tot = {
'matchPerson': {},
'matchFace': {},
}
for face_id in self.faces:
self.log.log((
'Request: Finding similar faces in Person Match Mode for '
'face {}').format(face_id))
for mode in ('matchPerson', 'matchFace'):
res_tot[mode][face_id] = []
res = util.CF.face.find_similars(
face_id,
large_face_list_id=self.large_face_list_id,
mode=mode)
self.log.log(
'Response: Found {} similar faces for face {} in {} mode'.
format(len(res), face_id, mode))
for entry in res:
persisted_id = entry['persistedFaceId']
confidence = entry['confidence']
res_tot[mode][face_id].append(
(self.persisted_faces[persisted_id], confidence))
self.result.set_data(self.faces, res_tot)
self.panel.SetupScrolling(scroll_x=False)
|
Cognitive-Face-Python/sample/view/panel_find_similar.py/0
|
{
"file_path": "Cognitive-Face-Python/sample/view/panel_find_similar.py",
"repo_id": "Cognitive-Face-Python",
"token_count": 4383
}
| 222 |
export CUDA_VISIBLE_DEVICES=6
python t5_run_train.py \
--model_name_or_path t5-base \
--subtask Mod \
--method ContrastExp \
--train_file pretrain_contrast \
--max_steps 100000 \
--save_steps 100000 \
--batch_size 8 \
--ebatch_size 16 \
--gas 1 \
--seed 1 \
--set set1
|
ContextualSP/abstraction_probing/code/t5_code/Mod_ContrastExp_pretrain.sh/0
|
{
"file_path": "ContextualSP/abstraction_probing/code/t5_code/Mod_ContrastExp_pretrain.sh",
"repo_id": "ContextualSP",
"token_count": 106
}
| 223 |
import argparse
from data_utils import load_data, load_score_file
from data_utils.metrics import calc_metrics
from experiments.exp_def import TaskDefs
parser = argparse.ArgumentParser()
parser.add_argument(
"--task_def", type=str, default="experiments/glue/glue_task_def.yml"
)
parser.add_argument("--task", type=str)
parser.add_argument("--std_input", type=str)
parser.add_argument("--score", type=str)
def generate_golds_predictions_scores(sample_id_2_pred_score_seg_dic, sample_objs):
sample_id_2_label_dic = {}
for sample_obj in sample_objs:
sample_id, label = sample_obj["uid"], sample_obj["label"]
sample_id_2_label_dic[sample_id] = label
assert set(sample_id_2_label_dic.keys()) == set(
sample_id_2_pred_score_seg_dic.keys()
)
golds = []
predictions = []
scores = []
for sample_id, label in sample_id_2_label_dic.items():
golds.append(label)
pred, score_seg = sample_id_2_pred_score_seg_dic[sample_id]
predictions.append(pred)
scores.extend(score_seg)
return golds, predictions, scores
args = parser.parse_args()
task_def_path = args.task_def
task_defs = TaskDefs(task_def_path)
task_def = task_defs.get_task_def(args.task)
n_class = task_def.n_class
sample_id_2_pred_score_seg_dic = load_score_file(args.score, n_class)
data_type = task_def.data_type
task_type = task_def.task_type
label_mapper = task_def.label_vocab
sample_objs = load_data(args.std_input, data_type, task_type, label_mapper)
golds, predictions, scores = generate_golds_predictions_scores(
sample_id_2_pred_score_seg_dic, sample_objs
)
metrics = calc_metrics(task_def.metric_meta, golds, predictions, scores)
print(metrics)
|
ContextualSP/adaptershare/calc_metrics.py/0
|
{
"file_path": "ContextualSP/adaptershare/calc_metrics.py",
"repo_id": "ContextualSP",
"token_count": 697
}
| 224 |
import os
import argparse
import random
from sys import path
path.append(os.getcwd())
from experiments.common_utils import dump_rows
from data_utils.task_def import DataFormat
from data_utils.log_wrapper import create_logger
logger = create_logger(__name__, to_disk=True, log_file="domain_prepro.log")
def load_scitail(file):
"""Loading data of scitail"""
rows = []
cnt = 0
with open(file, encoding="utf8") as f:
for line in f:
blocks = line.strip().split("\t")
assert len(blocks) > 2
if blocks[0] == "-":
continue
sample = {
"uid": str(cnt),
"premise": blocks[0],
"hypothesis": blocks[1],
"label": blocks[2],
}
rows.append(sample)
cnt += 1
return rows
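# Each SciTail tsv line is expected to carry at least three tab-separated
# fields (premise, hypothesis, label), which map onto the sample dict
# built above.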
def load_snli(file, header=True):
rows = []
cnt = 0
with open(file, encoding="utf8") as f:
for line in f:
if header:
header = False
continue
blocks = line.strip().split("\t")
assert len(blocks) > 10
if blocks[-1] == "-":
continue
lab = blocks[-1]
if lab is None:
import pdb
pdb.set_trace()
sample = {
"uid": blocks[0],
"premise": blocks[7],
"hypothesis": blocks[8],
"label": lab,
}
rows.append(sample)
cnt += 1
return rows
def parse_args():
parser = argparse.ArgumentParser(
description="Preprocessing GLUE/SNLI/SciTail dataset."
)
parser.add_argument("--seed", type=int, default=13)
parser.add_argument("--root_dir", type=str, default="data")
parser.add_argument(
"--old_glue",
action="store_true",
help="whether it is old GLUE, refer official GLUE webpage for details",
)
args = parser.parse_args()
return args
def main(args):
is_old_glue = args.old_glue
root = args.root_dir
assert os.path.exists(root)
######################################
# SNLI/SciTail Tasks
######################################
scitail_train_path = os.path.join(root, "SciTail/tsv_format/scitail_1.0_train.tsv")
scitail_dev_path = os.path.join(root, "SciTail/tsv_format/scitail_1.0_dev.tsv")
scitail_test_path = os.path.join(root, "SciTail/tsv_format/scitail_1.0_test.tsv")
snli_train_path = os.path.join(root, "SNLI/train.tsv")
snli_dev_path = os.path.join(root, "SNLI/dev.tsv")
snli_test_path = os.path.join(root, "SNLI/test.tsv")
######################################
# Loading DATA
######################################
scitail_train_data = load_scitail(scitail_train_path)
scitail_dev_data = load_scitail(scitail_dev_path)
scitail_test_data = load_scitail(scitail_test_path)
logger.info("Loaded {} SciTail train samples".format(len(scitail_train_data)))
logger.info("Loaded {} SciTail dev samples".format(len(scitail_dev_data)))
logger.info("Loaded {} SciTail test samples".format(len(scitail_test_data)))
snli_train_data = load_snli(snli_train_path)
snli_dev_data = load_snli(snli_dev_path)
snli_test_data = load_snli(snli_test_path)
logger.info("Loaded {} SNLI train samples".format(len(snli_train_data)))
logger.info("Loaded {} SNLI dev samples".format(len(snli_dev_data)))
logger.info("Loaded {} SNLI test samples".format(len(snli_test_data)))
canonical_data_suffix = "canonical_data"
canonical_data_root = os.path.join(root, canonical_data_suffix)
if not os.path.isdir(canonical_data_root):
os.mkdir(canonical_data_root)
# BUILD SciTail
scitail_train_fout = os.path.join(canonical_data_root, "scitail_train.tsv")
scitail_dev_fout = os.path.join(canonical_data_root, "scitail_dev.tsv")
scitail_test_fout = os.path.join(canonical_data_root, "scitail_test.tsv")
dump_rows(
scitail_train_data, scitail_train_fout, DataFormat.PremiseAndOneHypothesis
)
dump_rows(scitail_dev_data, scitail_dev_fout, DataFormat.PremiseAndOneHypothesis)
dump_rows(scitail_test_data, scitail_test_fout, DataFormat.PremiseAndOneHypothesis)
logger.info("done with scitail")
# BUILD SNLI
snli_train_fout = os.path.join(canonical_data_root, "snli_train.tsv")
snli_dev_fout = os.path.join(canonical_data_root, "snli_dev.tsv")
snli_test_fout = os.path.join(canonical_data_root, "snli_test.tsv")
dump_rows(snli_train_data, snli_train_fout, DataFormat.PremiseAndOneHypothesis)
dump_rows(snli_dev_data, snli_dev_fout, DataFormat.PremiseAndOneHypothesis)
dump_rows(snli_test_data, snli_test_fout, DataFormat.PremiseAndOneHypothesis)
logger.info("done with snli")
if __name__ == "__main__":
args = parse_args()
main(args)
|
ContextualSP/adaptershare/experiments/domain_adaptation/domain_prepro.py/0
|
{
"file_path": "ContextualSP/adaptershare/experiments/domain_adaptation/domain_prepro.py",
"repo_id": "ContextualSP",
"token_count": 2238
}
| 225 |
ner:
data_format: Seqence
dropout_p: 0.3
enable_san: False
labels:
- O
- B-MISC
- I-MISC
- B-PER
- I-PER
- B-ORG
- I-ORG
- B-LOC
- I-LOC
- X
- CLS
- SEP
metric_meta:
- SeqEval
n_class: 12
loss: SeqCeCriterion
kd_loss: MseCriterion
adv_loss: SymKlCriterion
split_names:
- train
- dev
- test
task_type: SeqenceLabeling
pos:
data_format: Seqence
dropout_p: 0.1
enable_san: False
labels:
- ','
- \
- ':'
- '.'
- "''"
- '"'
- (
- )
- $
- CC
- CD
- DT
- EX
- FW
- IN
- JJ
- JJR
- JJS
- LS
- MD
- NN
- NNP
- NNPS
- NNS
- NN|SYM
- PDT
- POS
- PRP
- PRP$
- RB
- RBR
- RBS
- RP
- SYM
- TO
- UH
- VB
- VBD
- VBG
- VBN
- VBP
- VBZ
- WDT
- WP
- WP$
- WRB
- X
- CLS
- SEP
metric_meta:
- SeqEval
n_class: 49
  loss: SeqCeCriterion
kd_loss: MseCriterion
adv_loss: SymKlCriterion
split_names:
- train
- dev
- test
task_type: SeqenceLabeling
chunk:
data_format: Seqence
dropout_p: 0.1
enable_san: False
labels:
- B-ADJP
- B-ADVP
- B-CONJP
- B-INTJ
- B-LST
- B-NP
- B-PP
- B-PRT
- B-SBAR
- B-VP
- I-ADJP
- I-ADVP
- I-CONJP
- I-INTJ
- I-LST
- I-NP
- I-PP
- I-SBAR
- I-VP
- O
- X
- CLS
- SEP
metric_meta:
- SeqEval
n_class: 23
  loss: SeqCeCriterion
kd_loss: MseCriterion
adv_loss: SymKlCriterion
split_names:
- train
- dev
- test
task_type: SeqenceLabeling
|
ContextualSP/adaptershare/experiments/ner/ner_task_def.yml/0
|
{
"file_path": "ContextualSP/adaptershare/experiments/ner/ner_task_def.yml",
"repo_id": "ContextualSP",
"token_count": 807
}
| 226 |
# coding=utf-8
# Copyright (c) Microsoft. All rights reserved.
from copy import deepcopy
import torch
from torch.nn import Parameter
from functools import wraps
class EMA:
def __init__(self, gamma, model):
super(EMA, self).__init__()
self.gamma = gamma
self.shadow = {}
self.model = model
self.setup()
def setup(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
self.shadow[name] = para.clone()
def cuda(self):
for k, v in self.shadow.items():
self.shadow[k] = v.cuda()
def update(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
self.shadow[name] = (
1.0 - self.gamma
) * para + self.gamma * self.shadow[name]
def swap_parameters(self):
for name, para in self.model.named_parameters():
if para.requires_grad:
temp_data = para.data
para.data = self.shadow[name].data
self.shadow[name].data = temp_data
def state_dict(self):
return self.shadow
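# EMA usage sketch: call ema.update() after each optimizer step so that
# shadow = gamma * shadow + (1 - gamma) * param; swap_parameters() exchanges
# the live weights with the smoothed copy for evaluation, and a second call
# swaps them back.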
# Adapted from
# https://github.com/pytorch/pytorch/blob/master/torch/nn/utils/weight_norm.py
# and https://github.com/salesforce/awd-lstm-lm/blob/master/weight_drop.py
def _norm(p, dim):
"""Computes the norm over all dimensions except dim"""
if dim is None:
return p.norm()
elif dim == 0:
output_size = (p.size(0),) + (1,) * (p.dim() - 1)
return p.contiguous().view(p.size(0), -1).norm(dim=1).view(*output_size)
elif dim == p.dim() - 1:
output_size = (1,) * (p.dim() - 1) + (p.size(-1),)
return p.contiguous().view(-1, p.size(-1)).norm(dim=0).view(*output_size)
else:
return _norm(p.transpose(0, dim), 0).transpose(0, dim)
def _dummy(*args, **kwargs):
# We need to replace flatten_parameters with a nothing function
return
class WeightNorm(torch.nn.Module):
def __init__(self, weights, dim):
super(WeightNorm, self).__init__()
self.weights = weights
self.dim = dim
def compute_weight(self, module, name):
g = getattr(module, name + "_g")
v = getattr(module, name + "_v")
return v * (g / _norm(v, self.dim))
@staticmethod
def apply(module, weights, dim):
# Terrible temporary solution to an issue regarding compacting weights
# re: CUDNN RNN
if issubclass(type(module), torch.nn.RNNBase):
module.flatten_parameters = _dummy
if weights is None: # do for all weight params
weights = [w for w in module._parameters.keys() if "weight" in w]
fn = WeightNorm(weights, dim)
for name in weights:
if hasattr(module, name):
print("Applying weight norm to {} - {}".format(str(module), name))
weight = getattr(module, name)
del module._parameters[name]
module.register_parameter(
name + "_g", Parameter(_norm(weight, dim).data)
)
module.register_parameter(name + "_v", Parameter(weight.data))
setattr(module, name, fn.compute_weight(module, name))
module.register_forward_pre_hook(fn)
return fn
def remove(self, module):
for name in self.weights:
            weight = self.compute_weight(module, name)
delattr(module, name)
del module._parameters[name + "_g"]
del module._parameters[name + "_v"]
module.register_parameter(name, Parameter(weight.data))
def __call__(self, module, inputs):
for name in self.weights:
setattr(module, name, self.compute_weight(module, name))
def weight_norm(module, weights=None, dim=0):
WeightNorm.apply(module, weights, dim)
return module
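# A hypothetical usage sketch: layer = weight_norm(torch.nn.Linear(10, 10),
# weights=['weight'], dim=0) reparameterizes the weight as w = g * v / ||v||,
# exposing layer.weight_g and layer.weight_v as the trainable parameters
# (names follow the register_parameter calls above).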
|
ContextualSP/adaptershare/module/my_optim.py/0
|
{
"file_path": "ContextualSP/adaptershare/module/my_optim.py",
"repo_id": "ContextualSP",
"token_count": 1761
}
| 227 |
import argparse
from data_utils import load_score_file
from experiments.exp_def import TaskDefs
parser = argparse.ArgumentParser()
parser.add_argument(
"--task_def", type=str, default="experiments/glue/glue_task_def.yml"
)
parser.add_argument("--task", type=str)
parser.add_argument(
"--add_soft_label",
action="store_true",
help="without this option, we replace hard label with soft label",
)
parser.add_argument("--std_input", type=str)
parser.add_argument("--score", type=str)
parser.add_argument("--std_output", type=str)
args = parser.parse_args()
task_def_path = args.task_def
task = args.task
task_defs = TaskDefs(task_def_path)
n_class = task_defs.get_task_def(task).n_class
sample_id_2_pred_score_seg_dic = load_score_file(args.score, n_class)
with open(args.std_output, "w", encoding="utf-8") as out_f:
for line in open(args.std_input, encoding="utf-8"):
fields = line.strip("\n").split("\t")
sample_id = fields[0]
target_score_idx = 1 # TODO: here we assume binary classification task
score = sample_id_2_pred_score_seg_dic[sample_id][1][target_score_idx]
if args.add_soft_label:
fields = fields[:2] + [str(score)] + fields[2:]
else:
fields[1] = str(score)
out_f.write("\t".join(fields))
out_f.write("\n")
|
ContextualSP/adaptershare/prepare_distillation_data.py/0
|
{
"file_path": "ContextualSP/adaptershare/prepare_distillation_data.py",
"repo_id": "ContextualSP",
"token_count": 554
}
| 228 |
# %%
from collections import defaultdict
import os
import json
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Dict
import pandas as pd
# %%
class NLBindingType(int, Enum):
Null = 0
Table = 1 # table in DB
Column = 2 # table column header
Value = 3 # 1) Cell value in given table 2) Number|DateTime|String, which may not exist in given table
Function = 4 # Aggregation, such as MAX, MIN, AVG, ...
    Operator = 5  # Comparison operator, such as ==, >, <=, starts_with, contains...
def __str__(self) -> str:
return ['', 'Tbl', 'Col', 'Val', 'Func', 'Op'][self.value]
@dataclass
class NLBindingToken:
text: str # original text of binding token
type: NLBindingType # binding type
value: str # unique value, which means we can use 'type' and 'value' to find a unique entity (table/column/value, ...)
def __str__(self) -> str:
if self.type == NLBindingType.Null:
return self.text
return "{}/[{}::{}]".format(self.text, str(self.type), self.value)
@dataclass
class NLBindingExample:
unique_id: str
table_id: str
question: str
binding_tokens: List[NLBindingToken]
tag: str = field(default="")
def to_json(self) -> Dict:
pass
@property
def question_tokens(self) -> List[str]:
return [x.text for x in self.binding_tokens]
@property
def serialized_string(self):
items = []
items.append(self.unique_id)
# items.append(self.table_id)
items.append(" ".join([str(x) for x in self.binding_tokens]))
return '\t'.join(items)
# %%
keywords = defaultdict(set)
def parse_squall_align_token(token: str, align_label: str, align_value: object) -> NLBindingToken:
if align_label == 'None':
return NLBindingToken(text=token, type=NLBindingType.Null, value=None)
if align_label == 'Column':
assert isinstance(align_value, str), align_value
return NLBindingToken(text=token, type=NLBindingType.Column, value=align_value)
if align_label == 'Keyword':
assert isinstance(align_value, list), align_value
keywords[align_value[0]].add(align_value[1])
return NLBindingToken(text=token, type=NLBindingType.Function, value="_".join(align_value))
if align_label == 'Literal':
return NLBindingToken(text=token, type=NLBindingType.Value, value=token)
raise NotImplementedError()
def load_squall_data(path: str):
raw_examples = json.load(open(path, 'r', encoding='utf-8'))
print('load {} examples from {} over.'.format(len(raw_examples), path))
binding_examples = []
for raw_example in raw_examples:
question_tokens = raw_example['nl']
assert len(question_tokens) == len(raw_example['nl_ralign'])
binding_tokens = []
for i, (align_label, align_value) in enumerate(raw_example['nl_ralign']):
binding_tokens += [parse_squall_align_token(question_tokens[i], align_label, align_value)]
binding_example = NLBindingExample(
unique_id="WTQ_Squall__{}".format(raw_example['nt']),
table_id="WTQ_Squall_{}".format(raw_example['tbl']),
question=" ".join(question_tokens),
binding_tokens=binding_tokens
)
binding_examples += [binding_example]
df = pd.DataFrame(
data=[[ex.unique_id, ex.table_id, " ".join([str(x) for x in ex.binding_tokens])] for ex in binding_examples],
columns=['id', 'table_id', 'binding_tokens'],
)
return binding_examples, df
dev_examples, dev_df = load_squall_data(r'../data/squall/dev-0.json')
# %%
for key, val in keywords.items():
print(key, val)
# %%
dev_df.to_csv(r'../data/squall/dev.binding.csv', index=False, sep='\t')
# %%
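# Illustrative check (hypothetical token): a Column binding token renders as
# "text/[Col::value]", e.g.
# str(NLBindingToken('nation', NLBindingType.Column, 'country')) == 'nation/[Col::country]'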
|
ContextualSP/awakening_latent_grounding/scripts/binding_annotate.squall.py/0
|
{
"file_path": "ContextualSP/awakening_latent_grounding/scripts/binding_annotate.squall.py",
"repo_id": "ContextualSP",
"token_count": 1538
}
| 229 |
{
"random_seed": 42,
"numpy_seed": 42,
"pytorch_seed": 42,
"dataset_reader": {
"type": "rewrite",
"lazy": false,
"super_mode": "before",
"joint_encoding": true,
"extra_stop_words": [
"of",
"about",
"the",
"any",
"for"
]
},
"model": {
"type": "rewrite",
"word_embedder": {
"tokens": {
"type": "embedding",
"embedding_dim": 100,
"trainable": true,
"padding_index": 0
}
},
"text_encoder": {
"type": "lstm",
"input_size": 100,
"hidden_size": 300,
"bidirectional": true,
"num_layers": 1
},
"inp_drop_rate": 0.1,
"out_drop_rate": 0.1,
"feature_sel": 115,
"loss_weights": [
0.3,
0.3,
0.4
],
"super_mode": "before",
"enable_training_log": true,
"unet_down_channel": 128
},
"iterator": {
"type": "basic",
"batch_size": 12
},
"validation_iterator": {
"type": "basic",
"batch_size": 12
},
"trainer": {
"num_epochs": 100,
"cuda_device": 0,
"patience": 10,
"validation_metric": "+EM",
"optimizer": {
"type": "adam",
"lr": 2e-4,
"weight_decay": 1e-5
},
"num_serialized_models_to_keep": 10,
"should_log_learning_rate": true
}
}
|
ContextualSP/incomplete_utterance_rewriting/configs/task.jsonnet/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/configs/task.jsonnet",
"repo_id": "ContextualSP",
"token_count": 604
}
| 230 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# Author: Qian Liu (SivilTaram)
# Original Repo: https://github.com/microsoft/ContextualSP
from typing import List
from typing import Tuple
import nltk
from allennlp.training.metrics.metric import Metric
from nltk.translate.bleu_score import corpus_bleu
from overrides import overrides
from rouge import Rouge
from simplediff import diff
class SpecialSymbol:
context_internal = '[SEP]'
end_placeholder = '[END]'
@Metric.register('batch_average')
class BatchAverage(Metric):
"""
This :class:`Metric` breaks with the typical ``Metric`` API and just stores values that were
computed in some fashion outside of a ``Metric``. If you have some external code that computes
the metric for you, for instance, you can use this to report the average result using our
``Metric`` API.
"""
def __init__(self) -> None:
self._total_value = 0.0
self._count = 0
@overrides
def __call__(self, values: List):
"""
Parameters
----------
value : ``float``
The value to average.
"""
for value in values:
self._total_value += value
self._count += 1
@overrides
def get_metric(self, reset: bool = False):
"""
Returns
-------
The average of all values that were passed to ``__call__``.
"""
average_value = (self._total_value / self._count if self._count > 0
else 0)
if reset:
self.reset()
return average_value
@overrides
def reset(self):
self._total_value = 0.0
self._count = 0
@Metric.register('f_score')
class FScoreMetric(Metric):
"""
This :class:`Metric` breaks with the typical ``Metric`` API and just stores values that were
computed in some fashion outside of a ``Metric``. If you have some external code that computes
the metric for you, for instance, you can use this to report the average result using our
``Metric`` API.
"""
def __init__(self, prefix) -> None:
self._total_inter_count = 0.0
self._total_pred_count = 0.0
self._total_ref_count = 0.0
self._prefix = prefix
@overrides
def __call__(self, inter_list: List,
pred_list: List,
ref_list: List):
for inter_count, pred_count, ref_count in zip(inter_list, pred_list, ref_list):
self._total_inter_count += inter_count
self._total_pred_count += pred_count
self._total_ref_count += ref_count
@overrides
def get_metric(self, reset: bool = False):
"""
Returns
-------
The average of all values that were passed to ``__call__``.
"""
precision = (self._total_inter_count / self._total_pred_count
if self._total_pred_count > 0 else 0)
recall = (self._total_inter_count / self._total_ref_count
if self._total_ref_count > 0 else 0)
fscore = 2 * precision * recall / (precision + recall) if precision > 0 and recall > 0 else 0
if reset:
self.reset()
return {
'_P' + self._prefix: precision,
'_R' + self._prefix: recall,
'F' + self._prefix: fscore
}
@overrides
def reset(self):
self._total_ref_count = 0.0
self._total_pred_count = 0.0
self._total_inter_count = 0.0
@Metric.register('corpus_bleu')
class CorpusBLEUMetric(Metric):
"""
This :class:`Metric` breaks with the typical ``Metric`` API and just stores values that were
computed in some fashion outside of a ``Metric``. If you have some external code that computes
the metric for you, for instance, you can use this to report the average result using our
``Metric`` API.
"""
def __init__(self) -> None:
self._total_reference = []
self._total_prediction = []
@overrides
def __call__(self, reference: List[str], prediction: List[str]):
ref_list = [[ref.split(' ')] for ref in reference]
pred_list = [pred.split(' ') for pred in prediction]
self._total_reference.extend(ref_list)
self._total_prediction.extend(pred_list)
@overrides
def get_metric(self, reset: bool = False):
"""
Returns
-------
The average of all values that were passed to ``__call__``.
"""
if len(self._total_prediction) > 0:
bleu1s = corpus_bleu(self._total_reference, self._total_prediction, weights=(1.0, 0.0, 0.0, 0.0))
bleu2s = corpus_bleu(self._total_reference, self._total_prediction, weights=(0.5, 0.5, 0.0, 0.0))
bleu3s = corpus_bleu(self._total_reference, self._total_prediction, weights=(0.33, 0.33, 0.33, 0.0))
bleu4s = corpus_bleu(self._total_reference, self._total_prediction, weights=(0.25, 0.25, 0.25, 0.25))
else:
bleu1s = 0
bleu2s = 0
bleu3s = 0
bleu4s = 0
if reset:
self.reset()
return {
'_BLEU1': bleu1s,
'_BLEU2': bleu2s,
'_BLEU3': bleu3s,
'BLEU4': bleu4s
}
@overrides
def reset(self):
self._total_reference = []
self._total_prediction = []
class Scorer(object):
@staticmethod
def em_score(references, candidates):
matches = []
for ref, cand in zip(references, candidates):
if ref == cand:
matches.append(1)
else:
matches.append(0)
return matches
@staticmethod
def rouge_score(references, candidates):
"""
https://github.com/pltrdy/rouge
:param references: list string
:param candidates: list string
:return:
"""
rouge = Rouge()
rouge1s = []
rouge2s = []
rougels = []
for ref, cand in zip(references, candidates):
            if cand.strip() == '':
                # Rouge raises an exception on empty hypotheses, so fall back
                # to a dummy token that scores (near) zero against the reference
                cand = 'hello'
            rouge_score = rouge.get_scores(cand, ref)
rouge_1 = rouge_score[0]['rouge-1']['f']
rouge_2 = rouge_score[0]['rouge-2']['f']
rouge_l = rouge_score[0]['rouge-l']['f']
rouge1s.append(rouge_1)
rouge2s.append(rouge_2)
rougels.append(rouge_l)
return rouge1s, rouge2s, rougels
@staticmethod
def restored_count(references, predictions, currents):
def score_function(ref_n_gram, pred_n_gram, ref_restore, pred_restore):
ref_restore = set(ref_restore)
pred_restore = set(pred_restore)
ref_n_gram = set([ngram_phrase for ngram_phrase in ref_n_gram if
set(ngram_phrase) & ref_restore])
pred_n_gram = set([ngram_phrase for ngram_phrase in pred_n_gram if
set(ngram_phrase) & pred_restore])
inter_count = len(ref_n_gram & pred_n_gram)
pred_count = len(pred_n_gram)
ref_count = len(ref_n_gram)
return inter_count, pred_count, ref_count
inter_count_1 = []
pred_count_1 = []
ref_count_1 = []
inter_count_2 = []
pred_count_2 = []
ref_count_2 = []
inter_count_3 = []
pred_count_3 = []
ref_count_3 = []
for ref, cand, cur in zip(references, predictions, currents):
ref_tokens = ref.split(' ')
pred_tokens = cand.split(' ')
cur_tokens = cur.split(' ')
ref_restore_tokens = [token for token in ref_tokens if token not in
cur_tokens]
pred_restore_tokens = [token for token in pred_tokens if token not in
cur_tokens]
if len(ref_restore_tokens) == 0:
continue
ref_ngram_1 = list(nltk.ngrams(ref_tokens, n=1))
pred_ngram_1 = list(nltk.ngrams(pred_tokens, n=1))
inter_1, pred_1, ref_1 = score_function(ref_ngram_1, pred_ngram_1, ref_restore_tokens, pred_restore_tokens)
ref_ngram_2 = list(nltk.ngrams(ref_tokens, n=2))
pred_ngram_2 = list(nltk.ngrams(pred_tokens, n=2))
inter_2, pred_2, ref_2 = score_function(ref_ngram_2, pred_ngram_2, ref_restore_tokens, pred_restore_tokens)
ref_ngram_3 = list(nltk.ngrams(ref_tokens, n=3))
pred_ngram_3 = list(nltk.ngrams(pred_tokens, n=3))
inter_3, pred_3, ref_3 = score_function(ref_ngram_3, pred_ngram_3, ref_restore_tokens, pred_restore_tokens)
inter_count_1.append(inter_1)
pred_count_1.append(pred_1)
ref_count_1.append(ref_1)
inter_count_2.append(inter_2)
pred_count_2.append(pred_2)
ref_count_2.append(ref_2)
inter_count_3.append(inter_3)
pred_count_3.append(pred_3)
ref_count_3.append(ref_3)
return (inter_count_1, pred_count_1, ref_count_1,
inter_count_2, pred_count_2, ref_count_2,
inter_count_3, pred_count_3, ref_count_3)
def export_word_edit_matrix(context: List,
current_sen: List,
label_sen: List,
super_mode: str = 'before',
                            # if multiple inserts are required, we only
                            # keep the longest one
only_one_insert: bool = False):
if isinstance(context, str):
context_seq = list(context)
current_seq = list(current_sen)
label_seq = list(label_sen)
else:
context_seq = context
current_seq = current_sen
label_seq = label_sen
applied_changes = diff(current_seq, label_seq)
def sub_finder(cus_list, pattern, used_pos):
find_indices = []
for i in range(len(cus_list)):
if cus_list[i] == pattern[0] and \
cus_list[i:i + len(pattern)] == pattern \
and i not in used_pos:
find_indices.append((i, i + len(pattern)))
if len(find_indices) == 0:
return 0, 0
else:
return find_indices[-1]
def cont_sub_finder(cus_list, pattern, used_pos):
context_len = len(cus_list)
pattern_len = len(pattern)
for i in range(context_len):
k = i
j = 0
temp_indices = []
while j < pattern_len and k < context_len:
if cus_list[k] == pattern[j][0] and \
cus_list[k:k + len(pattern[j])] == pattern[j] \
and k not in used_pos:
temp_indices.append((k, k + len(pattern[j])))
j += 1
else:
k += 1
if j == pattern_len:
return zip(*temp_indices)
else:
return 0, 0
rm_range = None
ret_ops = []
context_used_pos = []
current_used_pos = []
pointer = 0
for diff_sample in applied_changes:
diff_op = diff_sample[0]
diff_content = diff_sample[1]
if diff_op == '-':
if rm_range is not None:
ret_ops.append(['remove', rm_range, []])
start, end = sub_finder(current_seq, diff_content, current_used_pos
)
rm_range = [start, end]
current_used_pos.extend(list(range(start, end)))
elif diff_op == '+':
start, end = sub_finder(context_seq, diff_content, context_used_pos)
# cannot find the exact match substring, we should identify the snippets
if start == 0 and end == 0:
inner_diff = diff(diff_content, context_seq)
overlap_content = [inner_diff_sample[1] for
inner_diff_sample in inner_diff if inner_diff_sample[0] == '=']
if len(overlap_content) > 0:
# only take one insert
if len(overlap_content) == 1 or only_one_insert:
overlap_content = sorted(overlap_content, key=lambda x: len(x), reverse=True)[0]
start, end = sub_finder(context_seq, overlap_content,
context_used_pos)
else:
start_end_tuple = cont_sub_finder(context_seq, overlap_content, context_used_pos)
# start is a list, end is also
start, end = start_end_tuple
else:
start, end = 0, 0
if not (start == 0 and end == 0):
if isinstance(start, int):
add_ranges = [[start, end]]
else:
add_ranges = list(zip(start, end))
if rm_range is not None:
for add_range in add_ranges:
context_used_pos.extend(list(range(add_range[0], add_range[1])))
ret_ops.append(['replace', rm_range, add_range])
rm_range = None
else:
for add_range in add_ranges:
if super_mode in ['before', 'both']:
ret_ops.append(['before', [pointer, pointer], add_range])
if super_mode in ['after', 'both']:
if pointer >= 1:
ret_ops.append(['after', [pointer - 1, pointer - 1], add_range])
elif diff_op == '=':
if rm_range is not None:
ret_ops.append(['remove', rm_range, []])
start, end = sub_finder(current_seq, diff_content, current_used_pos
)
current_used_pos.extend(list(range(start, end)))
rm_range = None
pointer = end
return ret_ops
def transmit_seq(cur_str: str, context_str: str,
                 op_seq: List[Tuple[str, Tuple, Tuple]]) -> str:
    """
    Apply an operation sequence (replace/before/after) to the current utterance.
    :param cur_str: the current utterance as a space-joined string
    :param context_str: the context utterance that supplies the inserted spans
    :param op_seq: operations of the form (op, cur_range, context_range)
    :return: the rewritten utterance
    """
current_seq = cur_str.split(' ')
context_seq = context_str.split(' ')
for operation in op_seq:
opera_op = operation[0]
current_range = operation[1]
context_range = operation[2]
if opera_op == 'replace':
current_seq[current_range[0]:current_range[1]] = context_seq[context_range[0]:context_range[1]]
elif opera_op == 'before':
current_seq[current_range[0]:current_range[0]] = context_seq[context_range[0]:context_range[1]]
elif opera_op == 'after':
current_seq[current_range[0] + 1: current_range[0] + 1] = context_seq[context_range[0]:context_range[1]]
    # join the edited tokens back into a single utterance string
    ret_str = ' '.join(current_seq).strip()
return ret_str
def get_class_mapping(super_mode: str):
"""
Mapping mode into integer
:param super_mode: before, after & both
:return:
"""
class_mapping = ['none', 'replace']
if super_mode == 'both':
class_mapping.extend(['before', 'after'])
else:
class_mapping.append(super_mode)
return {k: v for v, k in enumerate(class_mapping)}
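# Minimal usage sketch (hypothetical sentences, assuming simplediff's standard
# output format): recover the edit operations that rewrite the current
# utterance into the labeled one, then replay them with transmit_seq.
if __name__ == '__main__':
    ops = export_word_edit_matrix(context='he likes the red apple'.split(),
                                  current_sen='he likes it'.split(),
                                  label_sen='he likes the red apple'.split())
    print(ops)  # e.g. [['replace', [2, 3], [2, 5]]]
    print(transmit_seq('he likes it', 'he likes the red apple', ops))
    # -> 'he likes the red apple'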
|
ContextualSP/incomplete_utterance_rewriting/src/data_utils.py/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/src/data_utils.py",
"repo_id": "ContextualSP",
"token_count": 7736
}
| 231 |
# coding: utf-8
import os
import json
import logging
import pickle as pkl
import numpy as np
from parsers.parser import Parser, IRNetSpiderParser
from src.utils.algo_utils import BipartiteGraphSolver
from src.utils.visualize_utils import draw_attention_hotmap
from src.components.human_simulator import HumanSimulator
from src.utils.semql_tree_util import Node as SemQLTree
from src.aligner_model import BertAligner
from src.data import SpiderSemQLConverter, BertUtil
from src.components import NLModifier, QuestionGenerator
from src.utils.utils import STOP_WORD_LIST, TEMPLATE_KEYWORDS
coterms = [x.strip() for x in open('data/spider/coterms.txt', 'r').readlines()]
stopwords = [x.strip() for x in open('data/common/stop_words.txt', 'r').readlines()]
coterms += stopwords
# define the logger at module level so class methods can use it even when
# this module is imported rather than run as a script
logger = logging.getLogger(__name__)
class SchemaValue:
COLUMN_NAME_TYPE = 'column_name'
TABLE_NAME_TYPE = 'table_name'
AGGR_TYPE = 'aggr'
COLUMN_VALUE_TYPE = 'value'
NONE_TYPE = 'none'
def __init__(self, nl_type, value):
self.type = nl_type
self.value = value
class InteractiveSqlCorrector:
EVALUATE_MODE = 0
SIMULATE_MODE = 1
def __init__(self,
aligner: BertAligner = None,
mode: str = 'evaluate',
human_simulator: HumanSimulator = None,
align_threshold=0.628):
assert mode.lower() in ('evaluate', 'interact'), "Mode must be evaluate or interact"
if aligner:
self.aligner = aligner
else:
self.aligner = BertAligner()
if mode.lower() == 'evaluate':
self.mode = InteractiveSqlCorrector.EVALUATE_MODE
else:
self.mode = InteractiveSqlCorrector.SIMULATE_MODE
if self.mode == InteractiveSqlCorrector.SIMULATE_MODE:
if human_simulator is None:
raise ValueError('Should pass HumanSimulation object')
self.human_simulator = human_simulator
logger.info(f'Alignment threshold is set to be {align_threshold}')
self.align_threshold = align_threshold
self.semql_converter = SpiderSemQLConverter()
self.bert_util = BertUtil()
self.bipartite_graph_solver = BipartiteGraphSolver()
self.question_generator = QuestionGenerator(n_options=3)
self.nl_modifier = NLModifier()
table_path = 'data/spider/tables.json'
self.all_database_info = {_['db_id']: _ for _ in json.load(open(table_path, 'r', encoding='utf-8'))}
# conceptnet_path = 'data/concept_net/conceptnet-assertions-5.6.0.csv'
# self.schema_linker = SchemaLinker(self.table_path, conceptnet_path)
self.nl_to_schema = dict()
def start_session(self, example, predict_sql, label='', question_field='question', query_field='query'):
# generate input for alignment model
question = example[question_field]
query = example[query_field]
db_id = example['db_id']
# Use ground-truth as parsed result
# semql = self.semql_converter.convert_example(example)
# semql_tree = SemQLTree.from_statements([str(_) for _ in semql])
# restatement = semql_tree.restatement()
db_schemas = self.all_database_info[db_id]
table_names = db_schemas['table_names']
column_names = [x[1] for x in db_schemas['column_names'] if x[1] != '*']
schema_names = table_names + column_names + ['average', 'maximum', 'minimum', 'sum']
schema_name_vocab = {token: schema_names.count(token) for token in schema_names}
semql_statements = self.semql_converter.convert_sql_to_semql(self.all_database_info[db_id], question, predict_sql)
predict_semql: SemQLTree = SemQLTree.from_statements([str(_) for _ in semql_statements])
restatement = predict_semql.restatement()
# 1. run aligner model to get alignment scores
alignment_matrix, ids, tokens, lengths = self.aligner.calculate_alignment(question.lower(), restatement.lower())
tokens1, tokens2 = self.aligner.split_tokens(tokens, lengths)
# if len(tokens1) > len(tokens2):
# logger.error("NL longer than restate SQL")
# return
alignment_matrix = alignment_matrix.squeeze(0).detach().cpu().numpy()
# alignment_matrix = self.post_process_alignment(alignment_matrix=alignment_matrix,
# col_tokens=tokens1, row_tokens=tokens2,
# col_stopwords=STOP_WORD_LIST, row_stopwords=TEMPLATE_KEYWORDS)
for i, token in enumerate(tokens2):
alignment_matrix[:, i] /= schema_name_vocab.get(token, 1)
max_assignment_score, assignment = self.bipartite_graph_solver.find_max(alignment_matrix)
# assert (assignment[0] == list(range(lengths[0]))).all()
src_aggr_alignment_score = alignment_matrix[assignment[0], assignment[1]]
# draw_attention_hotmap(alignment_matrix, tokens1, tokens2)
# pkl.dump((alignment_matrix, tokens1, tokens2), open('matrix2.pkl', 'wb'))
self.question_generator.refresh(database=db_id, utterance_tokens=tokens1)
self.nl_modifier.refresh(database=db_id, utterance=question)
# 2. find knowledge span, ask for explanation
question_list = []
# exact match replacement
long_schema_names = sorted([x for x in schema_names if len(x.split()) >= 2], key=lambda x: -len(x.split()))
exact_match_onehot = [0 for _ in range(len(tokens))]
for name in long_schema_names:
            st_char_position = question.lower().find(name)
            if st_char_position != -1:
st_position = len(question[:st_char_position].split())
ed_position = st_position + len(name.split())
for _ in range(st_position, ed_position):
exact_match_onehot[_] = 1
asked_tokens = []
for position, score in enumerate(src_aggr_alignment_score):
asked_token = tokens1[position]
if asked_token in coterms or score > self.align_threshold:
continue
asked_tokens.append(asked_token)
if exact_match_onehot[position] == 0:
score = score * self._compute_score_rate(asked_token, schema_names)
if score < self.align_threshold:
if asked_token not in STOP_WORD_LIST:
token_question, options = self.get_question(token_idx=position)
schema_item = self.get_response(asked_token, options)
self.nl_modifier.modify(asked_token, schema_item)
# self.nl_to_schema[asked_token] = (schema_value.type, schema_value.value)
# 3. reparse the modified NL
new_question = self.nl_modifier.get_utterance()
return new_question
@staticmethod
def _compute_score_rate(token, schema_names):
cnt = 0
for schema_name in schema_names:
if token in schema_name:
cnt += 1
return 1.0 / max(cnt, 1)
@staticmethod
def restatement(node: SemQLTree, with_tag=False):
if with_tag:
return node.restatement_with_tag()
else:
return node.restatement()
def get_question(self, token_idx):
d = self.question_generator.generate_question(token_idx)
question = d['question']
options = d['options']
return question, options
def get_response(self, token, options) -> SchemaValue:
if self.mode == InteractiveSqlCorrector.SIMULATE_MODE:
schema_value = SchemaValue(options[0][2], options[0][0]) # todo: delete here
else:
print(token)
print('0: None')
print('1: It is a value')
for idx, option in enumerate(options):
print(f'{idx + 2}: {option[0]}')
# response = int(input())
response = 2
if response == 0:
schema_value = None
elif response == 1:
schema_name = ''
schema_type = SchemaValue.COLUMN_VALUE_TYPE
schema_value = SchemaValue(schema_type, schema_name)
else:
selected_option = options[response]
schema_name = selected_option[0]
                if selected_option[2] == 'column':
schema_type = SchemaValue.COLUMN_NAME_TYPE
else:
schema_type = SchemaValue.TABLE_NAME_TYPE
schema_value = SchemaValue(schema_type, schema_name)
return schema_value
@staticmethod
def post_process_alignment(alignment_matrix, col_tokens, row_tokens, col_stopwords, row_stopwords):
weight_matrix = np.ones(alignment_matrix.shape)
for idx, col_token in enumerate(col_tokens):
if col_token in col_stopwords:
weight_matrix[idx, :] = 0.5
for idx, row_token in enumerate(row_tokens):
if row_token in row_stopwords:
weight_matrix[:, idx] = 0.5
alignment_matrix *= weight_matrix
return alignment_matrix
def generate_options_for_token(self, db_id, token, top=5):
def calculate_span_similarity(span1, span2):
set_span1 = set(span1.split())
set_span2 = set(span2.split())
return len(set(set_span1) & set(set_span2)) / len(set_span1 | set_span2)
# extract table names and column names from database info
database_info = self.all_database_info[db_id]
table_names = database_info['table_names']
        column_names = [_[1] for _ in database_info['column_names'] if _[1] != '*']
# compare token with table names and column names
names = [(name, 'table') for name in table_names] + [(name, 'column') for name in column_names]
names_with_score = []
for name, source in names:
score = calculate_span_similarity(token, name)
names_with_score.append((name, source, score))
names_with_score = sorted(names_with_score, key=lambda x: x[2], reverse=True)
if top != -1:
names_with_score = names_with_score[:top]
return [(name, source) for name, source, score in names_with_score]
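# Note: calculate_span_similarity above is the Jaccard overlap of the word
# sets, e.g. 'student name' vs. 'name' yields 1/2.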
def main():
interactive_sql_corrector = InteractiveSqlCorrector(aligner=None, mode='interact', human_simulator=HumanSimulator())
examples = json.load(open('data/spider/dev.json', 'r'))
predictions = open('data/parsers/irnet/output_origin.txt', 'r', encoding='utf-8').readlines()
assert len(examples) == len(predictions)
for i, (example, predict_sql) in enumerate(zip(examples, predictions)):
print(i)
interactive_sql_corrector.start_session(example, predict_sql)
if __name__ == '__main__':
    main()
|
ContextualSP/interactive_text_to_sql/src/interactive_sql_corrector.py/0
|
{
"file_path": "ContextualSP/interactive_text_to_sql/src/interactive_sql_corrector.py",
"repo_id": "ContextualSP",
"token_count": 4853
}
| 232 |
import argparse
import os
from random import shuffle
from remove_same import big_file_remove_same
parser = argparse.ArgumentParser()
parser.add_argument("--dataset_prefix", type=str, default='alchemy', help="dataset prefix")
parser.add_argument("--root_path", type=str, default='../corpus/', help="dataset prefix")
args = parser.parse_args()
args.corpus_file = os.path.join(args.root_path, '{}/pretraining_corpus_{}.txt'.format(args.dataset_prefix, args.dataset_prefix))
args.remove_same_file = os.path.join(args.root_path, '{}/temp.txt'.format(args.dataset_prefix))
args.train_source_file = os.path.join(args.root_path, '{}/train.src'.format(args.dataset_prefix))
args.train_target_file = os.path.join(args.root_path, '{}/train.tgt'.format(args.dataset_prefix))
args.dev_source_file = os.path.join(args.root_path, '{}/dev.src'.format(args.dataset_prefix))
args.dev_target_file = os.path.join(args.root_path, '{}/dev.tgt'.format(args.dataset_prefix))
big_file_remove_same(args.corpus_file, args.remove_same_file)
with open(args.remove_same_file, 'r') as f:
total_data_list = f.readlines()
print(len(total_data_list))
shuffle(total_data_list)
train_data_list = total_data_list[:-20000]
dev_data_list = total_data_list[-20000:]
fw_train_src = open(args.train_source_file, 'w')
fw_train_tgt = open(args.train_target_file, 'w')
fw_dev_src = open(args.dev_source_file, 'w')
fw_dev_tgt = open(args.dev_target_file, 'w')
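# Each corpus line is expected to be "ACTION\tPREV_STATE\tCURRENT_STATE"; it is
# rewritten as src = "PREV_STATE SEP ACTION" and tgt = "CURRENT_STATE".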
for item in train_data_list:
try:
action, prev_state, current_state = item.split('\t')
except:
continue
src_row = ' SEP '.join([prev_state.strip(), action.strip()])
tgt_row = current_state.strip()
fw_train_src.write(src_row)
fw_train_src.write('\n')
fw_train_tgt.write(tgt_row)
fw_train_tgt.write('\n')
for item in dev_data_list:
try:
action, prev_state, current_state = item.split('\t')
except:
continue
src_row = ' SEP '.join([prev_state.strip(), action.strip()])
tgt_row = current_state.strip()
    fw_dev_src.write(src_row)
    fw_dev_src.write('\n')
    fw_dev_tgt.write(tgt_row)
    fw_dev_tgt.write('\n')
fw_train_src.close()
fw_train_tgt.close()
fw_dev_src.close()
fw_dev_tgt.close()
|
ContextualSP/lemon/corpus_generation/corpus_generation_split_newformat.py/0
|
{
"file_path": "ContextualSP/lemon/corpus_generation/corpus_generation_split_newformat.py",
"repo_id": "ContextualSP",
"token_count": 912
}
| 233 |
from collections.abc import Mapping
from os.path import join
import logging
from git import Repo, exc as git_exc
from gtd.io import IntegerDirectories, Workspace
from gtd.log import SyncedMetadata
from gtd.utils import Config, cached_property
class ExperimentWorkspace(Workspace):
def __init__(self, root):
super(ExperimentWorkspace, self).__init__(root)
for attr in ['config', 'metadata']:
self.add_file(attr, '{}.txt'.format(attr))
self.add_dir('checkpoints', 'checkpoints')
class Experiment(object):
def __init__(self, config, save_dir):
"""Create experiment.
Args:
config (Config)
save_dir (str)
"""
self._config = config
self._workspace = ExperimentWorkspace(save_dir)
@property
def config(self):
return self._config
@property
def workspace(self):
return self._workspace
@cached_property
def metadata(self):
return SyncedMetadata(self.workspace.metadata)
def record_commit(self, src_dir):
try:
repo = Repo(src_dir)
if 'dirty_repo' in self.metadata or 'commit' in self.metadata:
raise RuntimeError('A commit has already been recorded.')
self.metadata['dirty_repo'] = repo.is_dirty()
self.metadata['commit'] = repo.head.object.hexsha.encode('utf-8')
        except git_exc.InvalidGitRepositoryError:
# Maybe not a git repo e.g., running on CodaLab
self.metadata['dirty_repo'] = False
self.metadata['commit'] = 'NONE'
def match_commit(self, src_dir):
"""Check that the current commit matches the recorded commit for this experiment.
Raises an error if commits don't match, or if there is dirty state.
Args:
src_dir (str): path to the Git repository
"""
if self.metadata['dirty_repo']:
raise EnvironmentError('Working directory was dirty when commit was recorded.')
repo = Repo(src_dir)
if repo.is_dirty():
raise EnvironmentError('Current working directory is dirty.')
current_commit = repo.head.object.hexsha.encode('utf-8')
exp_commit = self.metadata['commit']
if current_commit != exp_commit:
raise EnvironmentError("Commits don't match.\nCurrent: {}\nRecorded: {}".format(current_commit, exp_commit))
class TFExperiment(Experiment):
def __init__(self, config, save_dir):
super(TFExperiment, self).__init__(config, save_dir)
self._workspace.add_dir('tensorboard', 'tensorboard')
@cached_property
def saver(self):
from gtd.ml.utils import Saver
return Saver(self.workspace.checkpoints, keep_checkpoint_every_n_hours=5)
@cached_property
def tb_logger(self):
from gtd.ml.utils import TensorBoardLogger
return TensorBoardLogger(self.workspace.tensorboard)
class Experiments(Mapping):
"""A map from integers to Experiments."""
def __init__(self, root_dir, src_dir, experiment_factory, default_config_path, check_commit=True):
"""Create Experiments object.
Args:
root_dir (str): directory where all experiment data will be stored
src_dir (str): a Git repository path (used to check commits)
experiment_factory (Callable[[Config, str], Experiment]): a Callable, which takes a Config and a save_dir
as arguments, and creates a new Experiment.
default_config_path (str): path to a default config, to be used when no config is specified
check_commit (bool): if True, checks that current working directory is on same commit as when the experiment
was originally created.
"""
self._int_dirs = IntegerDirectories(root_dir)
self._src_dir = src_dir
self._exp_factory = experiment_factory
self._check_commit = check_commit
self._default_config_path = default_config_path
def _config_path(self, save_dir):
return join(save_dir, 'config.txt')
def __getitem__(self, i):
"""Reload an existing Experiment."""
save_dir = self._int_dirs[i]
config = Config.from_file(self._config_path(save_dir))
exp = self._exp_factory(config, save_dir)
if self._check_commit:
exp.match_commit(self._src_dir)
logging.info('Reloaded experiment #{}'.format(i))
return exp
def new(self, config=None, name=None):
"""Create a new Experiment."""
if config is None:
config = Config.from_file(self._default_config_path)
save_dir = self._int_dirs.new_dir(name=name)
cfg_path = self._config_path(save_dir)
config.to_file(cfg_path) # save the config
exp = self._exp_factory(config, save_dir)
exp.record_commit(self._src_dir)
logging.info('New experiment created at: {}'.format(exp.workspace.root))
logging.info('Experiment configuration:\n{}'.format(config))
return exp
def __iter__(self):
return iter(self._int_dirs)
def __len__(self):
return len(self._int_dirs)
def paths(self):
return list(self._int_dirs.values())
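# Minimal usage sketch (hypothetical paths and factory arguments): manage
# numbered experiment directories under a common root.
# exps = Experiments('out/experiments', '.', Experiment, 'configs/default.txt')
# exp = exps.new()   # create a new experiment from the default config
# exp0 = exps[0]     # reload experiment #0 (verifies the recorded Git commit)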
|
ContextualSP/lemon/executor/gtd/ml/experiment.py/0
|
{
"file_path": "ContextualSP/lemon/executor/gtd/ml/experiment.py",
"repo_id": "ContextualSP",
"token_count": 2133
}
| 234 |
from unittest import TestCase
import os
import numpy as np
import pytest
import tensorflow as tf
from numpy.testing import assert_array_equal, assert_array_almost_equal
from tensorflow.python.framework.errors import InvalidArgumentError
from gtd.ml.utils import TensorDebugger, clean_session, expand_dims_for_broadcast, broadcast, Saver, \
guarantee_initialized_variables, gather_2d
from gtd.tests.ml.test_framework import clean_test_session
class TestTensorDebugger(TestCase):
def test_tensor_debugger_deps(self):
tdb = TensorDebugger()
x = tf.constant(3, name='x')
z = tf.mul(x, 3, name='z')
with tf.control_dependencies([x]):
y = tf.constant(8, name='y')
deps = tdb.dependency_graph
# control dependencies depend on x's output
self.assertEqual(deps['y'], {'x:0'})
# each output depends on its op
self.assertEqual(deps['y:0'], {'y'})
# downstream ops depend on the output of earlier ops
self.assertTrue('x:0' in deps['z'])
def test_tensor_debugger_multiple(self):
tdb = TensorDebugger()
x = tf.constant([1, 2])
tdb.register('x', x)
zs = []
for k in range(3):
y = tf.constant(k)
z = tf.reduce_sum(x * y)
# register multiple nodes under the same name
tdb.register('y', y)
zs.append(z)
# 0, (1 + 2), (2 + 4)
final = tf.pack(zs)
with tf.Session() as sess:
results, bp_results = tdb.debug(sess, final, {})
def test(a, b):
self.assertTrue(np.array_equal(a, b))
# result correctly passed back
test(results, [0, 3, 6])
# values in for loop accumulated as list
test(bp_results['y'], [0, 1, 2])
def test_tensor_debugger_exec_path(self):
tdb = TensorDebugger()
x = tf.constant(5, name='x')
y = tf.placeholder(tf.int32, name='y')
z = tf.mul(x, y, 'z')
w = tf.constant(4, name='w')
f = tf.mul(z, w, 'f')
g = tf.constant(3, name='g')
with tf.control_dependencies([f]):
h = tf.constant(11, name='h')
# don't register x
tdb.register('y', y)
tdb.register('z', z)
tdb.register('w', w)
tdb.register('f', f)
tdb.register('g', g, force_run=True)
tdb.register('h', h)
with tf.Session() as sess:
result, bp_results = tdb.debug(sess, f, {y: 2})
# result is a single value, not a list
self.assertEqual(result, 40)
# excludes x, because not registered. excludes h, because not on execution path.
# includes g, because of force_run
self.assertEqual(bp_results, {'y': 2, 'z': 10, 'w': 4, 'g': 3})
results, bp_results = tdb.debug(sess, [h, g], {y: 2})
# returns a list
self.assertEqual(results, [11, 3])
# includes y, z, w and f because h depends on them through control_dependencies
# includes g because of force_run
self.assertEqual(bp_results, {'y': 2, 'z': 10, 'f': 40, 'w': 4, 'g': 3})
def test_expand_dims_for_broadcast():
with clean_session():
arr = tf.constant([
[
[1, 2, 3],
[4, 5, 6],
[4, 5, 6],
],
[
[1, 2, 3],
[4, 5, 6],
[4, 5, 6],
],
], dtype=tf.float32)
weights = tf.constant([1, 2], dtype=tf.float32)
assert arr.get_shape().as_list() == [2, 3, 3]
assert weights.get_shape().as_list() == [2]
new_weights = expand_dims_for_broadcast(weights, arr)
assert new_weights.eval().shape == (2, 1, 1)
bad_weights = tf.constant([1, 2, 3], dtype=tf.float32)
bad_new_weights = expand_dims_for_broadcast(bad_weights, arr)
with pytest.raises(InvalidArgumentError):
bad_new_weights.eval()
class TestGather2D(object):
@pytest.fixture
def x(self):
x = tf.constant([
[[1, 2], [2, 2], [3, 3]],
[[4, 5], [5, 4], [6, 6]],
[[7, 7], [8, 7], [9, 9]],
[[0, 8], [1, 1], [2, 2]]
], dtype=tf.int32)
return x
@pytest.mark.usefixtures('clean_test_session')
def test(self, x):
i = tf.constant([[0, 2],
[3, 0]],
dtype=tf.int32)
j = tf.constant([[1, 1],
[0, 2]],
dtype=tf.int32)
vals = gather_2d(x, i, j)
correct = np.array([
[[2, 2], [8, 7]],
[[0, 8], [3, 3]],
], dtype=np.int32)
assert_array_almost_equal(correct, vals.eval())
assert vals.get_shape().as_list() == [2, 2, 2]
@pytest.mark.usefixtures('clean_test_session')
def test_broadcast(self, x):
i = tf.constant([[0, 2],
[3, 0]],
dtype=tf.int32)
j = tf.constant([[1, 2]], dtype=tf.int32) # needs to be broadcast up
vals = gather_2d(x, i, j)
correct = np.array([
[[2, 2], [9, 9]],
[[1, 1], [3, 3]],
], dtype=np.int32)
assert_array_almost_equal(correct, vals.eval())
def test_broadcast():
with clean_session():
values = tf.constant([
[
[1, 2],
[1, 2],
],
[
[1, 2],
[3, 4],
],
[
[5, 6],
[7, 8],
]
], dtype=tf.float32)
mask = tf.constant([
[1, 0],
[1, 1],
[0, 1],
], dtype=tf.float32)
correct = np.array([
[
[1, 1],
[0, 0],
],
[
[1, 1],
[1, 1],
],
[
[0, 0],
[1, 1],
]
], dtype=np.float32)
assert values.get_shape().as_list() == [3, 2, 2]
assert mask.get_shape().as_list() == [3, 2]
mask = expand_dims_for_broadcast(mask, values)
assert mask.get_shape().as_list() == [3, 2, 1]
mask = broadcast(mask, values)
assert mask.get_shape().as_list() == [3, 2, 2]
mask_val = mask.eval()
assert_array_equal(mask_val, correct)
class TestSaver(object):
@pytest.fixture
def v(self):
return tf.get_variable('v', shape=[], initializer=tf.constant_initializer(5))
@pytest.mark.usefixtures('clean_test_session')
def test_restore(self, tmpdir, v):
save_100_path = str(tmpdir.join('weights-100'))
save_10_path = str(tmpdir.join('weights-10'))
saver = Saver(str(tmpdir))
assign_op = tf.assign(v, 12)
sess = tf.get_default_session()
guarantee_initialized_variables(sess)
assert v.eval() == 5
saver.save(100) # save as step 100
sess.run(assign_op)
assert v.eval() == 12
saver.save(10) # save as step 10
saver.restore() # restores from the larger step number by default (100)
assert v.eval() == 5 # restored
saver.restore(10) # force restore number 10
assert v.eval() == 12
saver.restore(save_100_path)
assert v.eval() == 5
# latest should be the largest step number, not necessarily last saved
assert saver.latest_checkpoint == save_100_path
assert os.path.exists(save_100_path)
assert saver.checkpoint_paths == {
10: save_10_path,
100: save_100_path,
}
|
ContextualSP/lemon/executor/gtd/tests/ml/test_utils.py/0
|
{
"file_path": "ContextualSP/lemon/executor/gtd/tests/ml/test_utils.py",
"repo_id": "ContextualSP",
"token_count": 4134
}
| 235 |
"""Store system evaluation results (e.g., accuracy)."""
from collections import OrderedDict
from codecs import open
from math import sqrt
import json
import numpy as np
import os
from scipy.stats import norm
class NumberSequenceStat(object):
"""Stores statistics of a sequence of numbers.
This is a reimplementation of fig's StatFig.
"""
def __init__(self):
self.s_count = 0
self.s_min = float('inf')
self.s_max = float('-inf')
self.s_min_key = None
self.s_max_key = None
self.s_sum = 0.
self.s_sumsq = 0.
def add(self, x, key=None):
if isinstance(x, NumberSequenceStat):
assert not key
self.s_count += x.s_count
self.s_sum += x.s_sum
self.s_sumsq += x.s_sumsq
if x.s_min < self.s_min:
self.s_min = x.s_min
self.s_min_key = x.s_min_key
if x.s_max > self.s_max:
self.s_max = x.s_max
self.s_max_key = x.s_max_key
elif isinstance(x, (list, tuple)):
x = [float(u) for u in x]
self.s_count += len(x)
self.s_sum += sum(x)
self.s_sumsq += sum(u*u for u in x)
min_x = min(x)
if min_x < self.s_min:
self.s_min = min_x
self.s_min_key = key
max_x = max(x)
if max_x > self.s_max:
self.s_max = max_x
self.s_max_key = key
else:
x = float(x)
self.s_count += 1
self.s_sum += x
self.s_sumsq += x * x
if x < self.s_min:
self.s_min = x
self.s_min_key = key
if x > self.s_max:
self.s_max = x
self.s_max_key = key
@property
def count(self):
return self.s_count
@property
def mean(self):
return self.s_sum / self.s_count
@property
def sum(self):
return self.s_sum
@property
def variance(self):
return self.s_sumsq / self.s_count - self.mean ** 2
@property
def stddev(self):
return self.variance ** .5
@property
def min(self):
return self.s_min
@property
def max(self):
return self.s_max
@property
def min_key(self):
return self.s_min_key
@property
def max_key(self):
return self.s_max_key
@property
def range(self):
return self.s_max - self.s_min
def __str__(self):
if not self.s_count:
return "NaN (0)"
return "{min}{min_key} << {mean} >> {max}{max_key} ({std} std {count} count)".format(
min=FmtD(self.s_min), min_key=('@' + self.s_min_key if self.s_min_key else ''),
mean=FmtD(self.mean), std=FmtD(self.stddev),
max=FmtD(self.s_max), max_key=('@' + self.s_max_key if self.s_max_key else ''),
count=self.s_count)
def as_dict(self):
if not self.s_count:
return {'count': 0}
return {
'count': self.s_count,
'min': self.s_min,
'mean': self.mean,
'stddev': self.stddev,
'max': self.s_max,
'sum': self.s_sum,
}
class BernoulliSequenceStat(NumberSequenceStat):
"""A NumberSequenceStat which assumes each value in the sequence is drawn i.i.d. from a Bernoulli."""
def __init__(self, number_seq_stat=None):
super(BernoulliSequenceStat, self).__init__()
if number_seq_stat:
self.add(number_seq_stat)
def __str__(self):
left, right = self.confidence_interval(0.05)
ci_str = " 95% CI = [{} - {}]".format(left, right)
s = super(BernoulliSequenceStat, self).__str__()
return s + ci_str
@classmethod
def _confidence_interval_by_z_wald(cls, p_hat, n, z):
increment = z * sqrt(p_hat * (1 - p_hat) / n)
return p_hat - increment, p_hat + increment
@classmethod
def _confidence_interval_by_z_wilson(cls, p_hat, n, z):
"""Compute confidence interval for estimate of Bernoulli parameter p.
Args:
p_hat: maximum likelihood estimate of p
n: samples observed
z: if z = standard_normal_quantile(1 - alpha/2), then alpha is the probability that the
true p falls outside the CI.
Uses the Wilson score interval to compute a confidence interval
for the true underlying Bernoulli parameter p.
Should behave well even when p is close to 0 or 1 and when n is not too large.
https://en.wikipedia.org/wiki/Binomial_proportion_confidence_interval#Wilson_score_interval
Returns:
left, right
"""
z2 = z**2
n2 = n**2
numerator = lambda sign: p_hat + z2 / (2 * n) + \
sign * z * sqrt(p_hat * (1 - p_hat) / n + z2 / (4 * n2))
denominator = 1 + z2 / n
left = numerator(-1.) / denominator
right = numerator(1.) / denominator
return left, right
@classmethod
def _confidence_interval_by_alpha(cls, p_hat, n, alpha, method='wald'):
"""Compute confidence interval for estimate of Bernoulli parameter p.
Args:
p_hat: maximum likelihood estimate of p
n: samples observed
alpha: the probability that the true p falls outside the CI
Returns:
left, right
"""
prob = 1 - 0.5 * alpha
z = norm.ppf(prob)
compute_ci = cls._confidence_interval_by_z_wald if method == 'wald' else cls._confidence_interval_by_z_wilson
return compute_ci(p_hat, n, z)
def confidence_interval(self, alpha):
p_hat = self.mean
n = self.count
return self._confidence_interval_by_alpha(p_hat, n, alpha)
def test_bernoulli_confidence_interval(method='wilson', trials=1000, ps=None):
"""Use this to compare performance of Wald vs Wilson CIs.
You should see that Wilson does better for extreme values.
Args:
method: 'wilson' or 'wald'
trials: # trials used to empirically estimate coverage probability
"""
if ps is None:
ps = np.arange(0.05, 0.95, 0.05)
n = 200 # observations
alpha = 0.1 # desired prob of CI not covering the true p
# run simulations to see if the computed CI has the desired coverage prob
alpha_hats = []
for p in ps:
misses = 0.
for _ in range(int(trials)):
samples = np.random.random(n) <= p # draw n Bernoulli's
p_hat = np.mean(samples) # compute estimate
left, right = BernoulliSequenceStat._confidence_interval_by_alpha(p_hat, n, alpha, method=method)
if p < left or p > right:
misses += 1
alpha_hat = misses / trials
alpha_hats.append(alpha_hat)
import matplotlib.pyplot as plt
plt.plot(ps, alpha_hats) # this line should be close to the constant alpha for all values of p
def FmtD(x):
"""Return a nicely formatted string for number x."""
if abs(x - round(x)) < 1e-40:
return str(int(x))
if abs(x) < 1e-3:
return "{:.2e}".format(x)
return "{:.3f}".format(x)
class Evaluation(object):
"""Stores various statistics."""
def __init__(self):
self.stats = OrderedDict()
def add(self, name, value, key=None, stat_type=NumberSequenceStat):
"""Add a statistic.
Args:
name (string): Name of the metric
value (bool, int, or float): The value
key (any): (optional) ID of the object that achieves this value
"""
if name not in self.stats:
self.stats[name] = stat_type()
stat = self.stats[name]
assert isinstance(stat, stat_type)
stat.add(value, key=key)
def add_micro_macro(self, name, values, key=None):
"""Add two stats:
- micro-averaging: average the values in each sequence first
- macro-averaging: average all values together.
"""
# Micro
stat = NumberSequenceStat()
stat.add(values)
self.add(name + '_micro', stat, key=key)
# Macro
if stat.count:
self.add(name + '_macro', stat.mean, key=key)
def add_evaluation(self, evaluation):
"""Add all statistics from another Evaluation object."""
for name, stat in evaluation.stats.items():
self.add(name, stat)
def line_summarize(self, prefix='EVAL', delim=' '):
"""Return a short one-line summary string."""
stuff = []
for name, stat in self.stats.items():
if not stat.count:
stuff.append(name + '=NaN')
else:
stuff.append(name + '=' + FmtD(stat.mean))
return prefix + ': ' + delim.join(stuff)
def summarize(self, buffer, prefix='EVAL'):
"""Print an extensive summary.
Args:
buffer: can be a file or a StringIO object
"""
header = '===== SUMMARY for %s =====' % prefix
buffer.write(header)
buffer.write('\n')
# Padding needed for aligning the key names
pad = '{:' + str(max(len(x) for x in self.stats)) + '}'
for name, stat in self.stats.items():
buffer.write(('[{}] ' + pad + ' : {}').format(prefix, name, stat))
buffer.write('\n')
buffer.write('=' * len(header))
buffer.write('\n')
def json_summarize(self, json_filename, step, prefix=None):
flags = 'r+' if os.path.exists(json_filename) else 'w+'
with open(json_filename, flags) as json_file:
text = json_file.read()
json_file.seek(0)
if len(text) == 0:
log = {}
else:
log = json.loads(text)
stats_dict = self.as_dict(prefix)
for name, stat in stats_dict.items():
if name in log:
log[name].append(stat['mean'])
else:
log[name] = [stat['mean']]
json.dump(log, json_file)
json_file.truncate()
def tboard_summarize(self, tb_logger, step, prefix=None):
"""Log evaluation to Tensorboard.
Args:
tb_logger (TensorBoardLogger)
step (int)
prefix (basestring)
"""
for name, stat in self.stats.items():
full_name = '{}_{}'.format(prefix, name) if prefix else name
tb_logger.log(full_name, stat.mean, step)
def as_dict(self, prefix=None):
"""Return a dict representation of the Evaluation."""
result = {}
for name, stat in self.stats.items():
full_name = '{}_{}'.format(prefix, name) if prefix else name
result[full_name] = stat.as_dict()
return result
|
ContextualSP/lemon/executor/strongsup/evaluation.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/evaluation.py",
"repo_id": "ContextualSP",
"token_count": 5294
}
| 236 |
from prettytable import PrettyTable
from strongsup.results.recipe import RLongCookbook
class TableDrawer(object):
"""Given a list of Entries, draws tables based on some criteria.
Args:
entries (list[Entry]): the entries
name (string): the name of this table (typically the dataset from which
the entries come)
"""
def __init__(self, entries, name):
self._entries = entries
self._name = name
def avg_table(self, final=False):
"""Returns the ASCII string of the table corresponding to the
results for these entries, averaged over all seeds.
Args:
final (bool): table contains Final results if True, otherwise
valid results
Returns:
string: the table
"""
entries = sorted(self._entries,
key=lambda entry: entry.avg,
reverse=True)
table = PrettyTable()
table.field_names = self._header(final)
cookbook = RLongCookbook()
for entry in entries:
etype_name = cookbook.get_recipe_name(
entry.experiment_type.configs, entry.experiment_type.base)
if etype_name is None:
etype_name = str(entry.experiment_type)
name = "{}-{}".format(
self._name, truncate(etype_name))
result = entry.avg
row = [name]
if final:
row = row + [result.overall_final_acc] + result.final_accs
else:
row = row + [result.overall_valid_acc] + result.valid_accs
table.add_row(row)
return table
def all_table(self, final=False):
"""Table with all the seeds.
Args:
final (bool): table contains Final results if True, otherwise
valid results
Returns:
string: the table
"""
rows = sorted(((entry, seed) for entry in self._entries
for seed in entry.seeds),
key=lambda entry_seed: entry_seed[0].get_value(entry_seed[1]),
reverse=True)
table = PrettyTable()
table.field_names = self._header(final)
cookbook = RLongCookbook()
for entry, seed in rows:
etype_name = cookbook.get_recipe_name(
entry.experiment_type.configs, entry.experiment_type.base)
if etype_name is None:
etype_name = str(entry.experiment_type)
name = "{}-{}-{}".format(
self._name, truncate(etype_name), seed)
result = entry.get_value(seed)
row = [name]
if final:
row = row + [result.overall_final_acc] + result.final_accs
else:
row = row + [result.overall_valid_acc] + result.valid_accs
table.add_row(row)
return table
# TODO: Clean up...
def stddev_table(self, final=False):
"""Table with stddevs"""
entries = sorted(self._entries,
key=lambda entry: entry.avg,
reverse=True)
table = PrettyTable()
acc_type = "Final" if final else "Valid"
header = ["Experiment Type"]
for i in range(1, 6):
header.append("{} stddev {} utt".format(acc_type, i))
table.field_names = header
cookbook = RLongCookbook()
for entry in entries:
etype_name = cookbook.get_recipe_name(
entry.experiment_type.configs, entry.experiment_type.base)
if etype_name is None:
etype_name = str(entry.experiment_type)
name = "{}-{}".format(
self._name, truncate(etype_name))
stddev = entry.var.sqrt()
row = [name]
if final:
row = row + stddev.final_accs
else:
row = row + stddev.valid_accs
table.add_row(row)
return table
def _header(self, final=False):
acc_type = "Final" if final else "Valid"
header = ["Experiment Type", "Overall {} Acc".format(acc_type)]
for i in range(1, 6):
header.append("{} Acc {} utt".format(acc_type, i))
return header
def truncate(s):
truncate_len = 50
return s[:truncate_len - 3] + "..." if len(s) > truncate_len else s
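# e.g. truncate('a' * 60) keeps the first 47 characters and appends '...',
# so the result is always at most 50 characters long.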
|
ContextualSP/lemon/executor/strongsup/results/table_drawer.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/results/table_drawer.py",
"repo_id": "ContextualSP",
"token_count": 2195
}
| 237 |
import csv
import os
from codecs import open
from strongsup.example import Context, Example
from strongsup.example_factory import ExampleFactory
from strongsup.predicate import Predicate
from strongsup.utils import EOU
from strongsup.tables.value import to_value_list
from strongsup.tables.utils import tsv_unescape, tsv_unescape_list, resolve_ptb_brackets
from strongsup.tables.world import WikiTableWorld
################################
# WikiTableExampleFactory
class WikiTableExampleFactory(ExampleFactory):
"""Read example from the WikiTableQuestions dataset from a TSV file.
The file should contain the following fields:
id, utterance, context, targetValue
- If the field `tokens` (CoreNLP tokenization) is present, use it instead of `utterance`
- If the field `targetCanon` is present, also use it to construct more accurate target values
- If supervised = True, the file should also have the field `logicalForm`
"""
def __init__(self, filename, supervised=False):
self._filename = filename
self._supervised = supervised
@property
def examples(self):
with open(self._filename, 'r', 'utf8') as fin:
header = fin.readline().rstrip('\n').split('\t')
for line in fin:
record = dict(list(zip(header, line.rstrip('\n').split('\t'))))
# Build Example
                world = WikiTableWorld(record['context'])
                if 'tokens' in record:
                    raw_utterance = resolve_ptb_brackets(tsv_unescape_list(record['tokens']))
                else:
                    raw_utterance = tsv_unescape(record['utterance']).split()
                context = Context(world, [raw_utterance])
answer = to_value_list(tsv_unescape_list(record['targetValue']),
tsv_unescape_list(record['targetCanon'])
if 'targetCanon' in record else None)
if not self._supervised:
logical_form = None
else:
logical_form_text = record.get('logicalForm', '')
if logical_form_text == 'None':
logical_form = None
else:
logical_form = []
for name in logical_form_text.split():
logical_form.append(Predicate(name, context))
if logical_form[-1] != EOU:
logical_form.append(Predicate(EOU, context))
example = Example(context, answer, logical_form)
yield example
################################
# Testing purposes
if __name__ == '__main__':
from dependency.data_directory import DataDirectory
factory = WikiTableExampleFactory(os.path.join(
DataDirectory.seq_questions, 'random-split-1-dev-processed.tsv'))
for i, ex in enumerate(factory.examples):
print(ex.context.utterances, ex.answer, ex.logical_form)
if i == 10:
exit(0)
|
ContextualSP/lemon/executor/strongsup/tables/example_factory.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/tables/example_factory.py",
"repo_id": "ContextualSP",
"token_count": 1323
}
| 238 |
import pytest
from strongsup.tables.predicates_computer import (
similarity_ratio,
)
class TestEditDistance(object):
CASES = [
('superman', 'superman', 0),
('kitten', 'sitting', 5),
('industry', 'interest', 8),
('to ardo', 'from ardo', 4),
('intend', 'interned', 2),
('saturday', 'sunday', 4),
('abbababb', 'babbabab', 2),
('bababaabba', 'babbabbaba', 4),
('bababaabba', 'baababbaba', 4),
('babadook', 'gagadook', 4),
('mickey', 'icky', 2),
('0000000000', '0000000000', 0),
('0000010000', '0000100000', 2),
]
def test_similarity_ratio(self):
for s1, s2, key in self.CASES:
for threshold in (x * .1 + 1e-3 for x in range(12)):
correct = 1 - key * 1. / (len(s1) + len(s2))
correct = correct if correct >= threshold else 0.
assert abs(similarity_ratio(s1, s2, threshold) - correct) < 1e-6
# TODO: Test other things
|
ContextualSP/lemon/executor/strongsup/tests/tables/test_predicates_computer.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/tests/tables/test_predicates_computer.py",
"repo_id": "ContextualSP",
"token_count": 557
}
| 239 |
python lemon/run_model_finetune.py \
--dataset-dir lemon_data/dataset/DATASET_PREFIX/bin_large \
--exp-dir OUTPUT_PATH \
--model-path PRE_TRAINED_MODEL_PATH \
--model-arch bart_large \
--total-num-update 10000 \
--batch-size 64 \
--gradient-accumulation 1 \
--warmup-steps 1500 \
--learning-rate 3e-5
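
# Note: DATASET_PREFIX, OUTPUT_PATH and PRE_TRAINED_MODEL_PATH above are
# placeholders; substitute real paths before running.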
|
ContextualSP/lemon/finetune.sh/0
|
{
"file_path": "ContextualSP/lemon/finetune.sh",
"repo_id": "ContextualSP",
"token_count": 146
}
| 240 |
## eQASC Evaluator
This script evaluates predictions for eQASC predictions against ground truth annotations and produces metrics.
Hint: If you are in a hurry and simply want to evaluate your predictions, run the evaluator in Docker (see [Running in Docker](#running-in-docker) below).
## Usage
The program [evaluator.py](allennlp_reasoning_explainqa/evaluator/evaluator.py) takes three arguments:
1. The filename of a prediction file.
2. The filename of the labels to evaluate against.
3. The filename where metrics will be written.
### Prediction file
The predictions file should hold one JSON object per line (JSON Lines format), with each object having a `score` and a `chain_id`. For example:
```bash
% cat predictions/grc.test.predict | head -n 4
{"score": 0.2023383378982544, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_1"}
{"score": 0.5158032774925232, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_2"}
{"score": 0.17925743758678436, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_5"}
{"score": 0.8793290853500366, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_7"}
```
The chain IDs must match those in the labels. (See below.)
The file `predictions/grc.test.predict` in this repo contains an example
prediction for the test labels. It was made with the script
[grc.sh](https://github.com/harsh19/Reasoning-Chains-MultihopQA/blob/evaluator/code/scripts/grc.sh#L43)
### Labels file
The labels file holds a single JSON object with keys being chain IDs and values
being labels. It looks like this:
```
% cat ../data/chainid_to_label_test.json
{"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_7": 0,
"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_8": 0,
"3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_6": 1,
...
```
The file `../data/chainid_to_label_test.json` in this repo
contains the labels for test chains.
### Output metrics
A "metrics" file will be written that contains evaluation scores.
This file holds a single JSON structure with three key-value pairs. The keys are:
* `auc_roc` -- This is the area under the ROC curve, which measures classification performance at various threshold settings. The ROC curve plots the true positive rate (TPR) on the y-axis against the false positive rate (FPR) on the x-axis. The value is always between 0 and 1 (with 1 representing the best performance).
* `explainP1` -- This is precision@1 metric, which measures the fraction of cases where the highest scoring candidate chain is a valid reasoning explanation. Value is always between 0 and 1 (with 1 representing the best performance).
* `explainNDCG` -- This is the Normalized Discounted Cumulative Gain (https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG), which measures ranking performance. The value is between 0 and 1 (with 1 representing the best performance); the score reaches 1 exactly when every valid reasoning chain is ranked above every invalid one.
Example:
```bash
% cat metrics.json
{"auc_roc": 0.8457533894216488, "explainP1": 0.5387978142076503, "explainNDCG": 0.6376201537170901}
```
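To make the three metrics concrete, here is a rough sketch of how they can be computed from the score and label dicts built above. It assumes chain IDs of the form `<question>_<k>_<n>`, so chains can be grouped by question, and uses `sklearn` for the ROC computation; the official evaluator may differ in detail:
```python
import math
from collections import defaultdict

from sklearn.metrics import roc_auc_score

def eval_metrics(scores, labels):
    """Sketch of auc_roc / explainP1 / explainNDCG from score/label dicts."""
    chain_ids = sorted(labels)
    auc = roc_auc_score([labels[c] for c in chain_ids],
                        [scores[c] for c in chain_ids])
    # Group chains by question: "<question>_<k>_<n>" -> "<question>" (assumed).
    groups = defaultdict(list)
    for c in chain_ids:
        groups[c.rsplit("_", 2)[0]].append((scores[c], labels[c]))
    p1_hits, ndcgs = [], []
    for chains in groups.values():
        chains.sort(key=lambda t: -t[0])  # best score first
        p1_hits.append(chains[0][1])      # is the top-ranked chain valid?
        dcg = sum(rel / math.log2(rank + 2)
                  for rank, (_, rel) in enumerate(chains))
        ideal = sorted((rel for _, rel in chains), reverse=True)
        idcg = sum(rel / math.log2(rank + 2) for rank, rel in enumerate(ideal))
        ndcgs.append(dcg / idcg if idcg > 0 else 0.0)
    return {"auc_roc": auc,
            "explainP1": sum(p1_hits) * 1.0 / len(p1_hits),
            "explainNDCG": sum(ndcgs) / len(ndcgs)}
```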
## Running in Docker
The eQASC evaluator has many dependencies, so if you only want to run the
evaluator on a prediction file, this is the easiest way to do so, without
setting up a local development environment (Conda) with those dependencies
installed.
First, build an image with the evaluator:
```
docker build -t eqasc-evaluator .
```
Then run it with the above files like this:
```
docker run \
-v $PWD/predictions:/predictions:ro \
-v $PWD/../data:/labels:ro \
-v /tmp:/output:rw \
--entrypoint python \
eqasc-evaluator \
allennlp_reasoning_explainqa/evaluator/evaluator.py \
/predictions/grc.test.predict \
/labels/chainid_to_label_test.json \
/output/metrics.json
```
This evaluates the file `predictions/grc.test.predict` against the labels in
`../data/chainid_to_label_test.json`, and writes the file
`/tmp/metrics.json` locally:
```
% cat /tmp/metrics.json
{"auc_roc": 0.8457533894216488, "explainP1": 0.5387978142076503, "explainNDCG": 0.6376201537170901}
```
See above (under Usage) for an explanation of the three arguments to the `evaluator.py` script.
## Running locally
You'll have to install dependencies with Conda, following the `environment.yml` file.
After you've done that, run the evaluator like this:
```bash
% env PYTHONPATH=. python allennlp_reasoning_explainqa/evaluator/evaluator.py predictions/grc.test.predict ../data/chainid_to_label_test.json /tmp/metrics.json
```
This evaluates the file `predictions/grc.test.predict` against the labels in
`../data/chainid_to_label_test.json`, and writes the file
`/tmp/metrics.json` locally:
```
% cat /tmp/metrics.json
{"auc_roc": 0.8457533894216488, "explainP1": 0.5387978142076503, "explainNDCG": 0.6376201537170901}
```
## Testing
The script `test-with-docker.sh` uses the Docker method to exercise the
evaluator and confirm expected scores.
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/code/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/code/README.md",
"repo_id": "ContextualSP",
"token_count": 1610
}
| 241 |
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_1","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_10","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_2","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_3","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_4","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_5","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_6","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_7","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_8","score":0.5}
{"chain_id":"304SM51WA33FD6TGROJ4OS4ZWAQSB9_1_9","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_1","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_10","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_2","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_3","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_4","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_5","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_6","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_7","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_8","score":0.5}
{"chain_id":"30BUDKLTXDUCE77PPJ8MWP0SBTB5E2_1_9","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_1","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_10","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_2","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_3","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_4","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_5","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_6","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_7","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_8","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKARHW0_1_9","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_1","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_10","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_2","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_3","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_4","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_5","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_6","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_7","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_8","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIKBQHW1_1_9","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_1","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_10","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_2","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_3","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_4","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_5","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_6","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_7","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_8","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIM9AHWJ_1_9","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_1","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_10","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_2","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_3","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_4","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_5","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_6","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_7","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_8","score":0.5}
{"chain_id":"30BXRYBRP4WIB0DBBL317SAIN13WHC_1_9","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_1","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_10","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_2","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_3","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_4","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_5","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_6","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_7","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_8","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9F2PMY_1_9","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_1","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_10","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_2","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_3","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_4","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_5","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_6","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_7","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_8","score":0.5}
{"chain_id":"30H4UDGLT2HEJ5HLQW5J73AI9QGMPV_1_9","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_1","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_10","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_2","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_3","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_4","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_5","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_6","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_7","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_8","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP879AX0E_1_9","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_1","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_10","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_2","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_3","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_4","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_5","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_6","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_7","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_8","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP88BW0X8_1_9","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_1","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_10","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_2","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_3","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_4","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_5","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_6","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_7","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_8","score":0.5}
{"chain_id":"30IQTZXKAK5MP0C5NIS23JP8K6H0XV_1_9","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_1","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_10","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_2","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_3","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_4","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_5","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_6","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_7","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_8","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SW9MTQHR_1_9","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_1","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_10","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_2","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_3","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_4","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_5","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_6","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_7","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_8","score":0.5}
{"chain_id":"30JNVC0OR9JDR3HPZC4VF3SWWC2QH3_1_9","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_1","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_10","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_2","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_3","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_4","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_5","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_6","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_7","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_8","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JFYH0Z1_1_9","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_1","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_10","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_2","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_3","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_4","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_5","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_6","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_7","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_8","score":0.5}
{"chain_id":"30LB5CDZNC9BUVCKO0GN045JTWAZ03_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8DVX2I9_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8GIRI2W_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8KTI2IX_1_9","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_1","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_10","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_2","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_3","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_4","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_5","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_6","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_7","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_8","score":0.5}
{"chain_id":"30LSNF239UUWVFQO3JWFJXV8T9MI2M_1_9","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_1","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_10","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_2","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_3","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_4","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_5","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_6","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_7","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_8","score":0.5}
{"chain_id":"30MVJZJNHMC3QAVT6AWU5LIM19T9J8_1_9","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_1","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_10","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_2","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_3","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_4","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_5","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_6","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_7","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_8","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60ECYNENR_1_9","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_1","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_10","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_2","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_3","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_4","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_5","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_6","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_7","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_8","score":0.5}
{"chain_id":"30OG32W0SUAG4WDVTJ48Q60EZ24NEC_1_9","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_1","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_10","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_2","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_3","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_4","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_5","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_6","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_7","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_8","score":0.5}
{"chain_id":"3137ONMDKG4AU4W96FRD0MRH0AMGE7_1_9","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_1","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_10","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_2","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_3","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_4","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_5","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_6","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_7","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_8","score":0.5}
{"chain_id":"3180JW2OT4BKPNTH3KJDT5DKGY95JJ_1_9","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_1","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_10","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_2","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_3","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_4","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_5","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_6","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_7","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_8","score":0.5}
{"chain_id":"31EUONYN2V2FOSZTPOTV5ZO5TLBOVT_1_9","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_1","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_10","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_2","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_3","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_4","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_5","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_6","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_7","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_8","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04WUPLSE_1_9","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_1","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_10","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_2","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_3","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_4","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_5","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_6","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_7","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_8","score":0.5}
{"chain_id":"31HQ4X3T3S9RQFFSI18Y2V04XPKLS0_1_9","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_1","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_10","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_2","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_3","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_4","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_5","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_6","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_7","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_8","score":0.5}
{"chain_id":"31IBVUNM9SYLIFM0QLA5I5FRBNBFV5_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV89HRLRO_1_9","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_1","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_10","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_2","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_3","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_4","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_5","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_6","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_7","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_8","score":0.5}
{"chain_id":"31LVTDXBL79FP0FF3C8TCLV8MQTLRL_1_9","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_1","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_10","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_2","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_3","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_4","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_5","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_6","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_7","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_8","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN85N5F3K_1_9","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_1","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_10","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_2","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_3","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_4","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_5","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_6","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_7","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_8","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN87Y23FT_1_9","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_1","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_10","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_2","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_3","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_4","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_5","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_6","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_7","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_8","score":0.5}
{"chain_id":"31N2WW6R9RP166KH6B4ZZAN8AF53FX_1_9","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_1","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_10","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_2","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_3","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_4","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_5","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_6","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_7","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_8","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVU8ND710_1_9","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_1","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_10","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_2","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_3","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_4","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_5","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_6","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_7","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_8","score":0.5}
{"chain_id":"31Q0U3WYDPESHARW1OS4NPVUBEQ71Y_1_9","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_1","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_10","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_2","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_3","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_4","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_5","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_6","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_7","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_8","score":0.5}
{"chain_id":"31T4R4OBOSFC4D1UHLHO4LELEWXC7E_1_9","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_1","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_10","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_2","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_3","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_4","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_5","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_6","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_7","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_8","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YK4RPI56_1_9","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_1","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_10","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_2","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_3","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_4","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_5","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_6","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_7","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_8","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD0VI53_1_9","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_1","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_10","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_2","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_3","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_4","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_5","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_6","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_7","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_8","score":0.5}
{"chain_id":"31UV0MXWNQBOUC9BL2YDH4YKD10I5A_1_9","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_1","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_10","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_2","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_3","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_4","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_5","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_6","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_7","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_8","score":0.5}
{"chain_id":"31Z0PCVWUKETQU3537CETVEPGECT7Z_1_9","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_1","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_10","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_2","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_3","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_4","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_5","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_6","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_7","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_8","score":0.5}
{"chain_id":"320DUZ38G7LI5KI1KG24X249GZ5GJO_1_9","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_1","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_10","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_2","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_3","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_4","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_5","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_6","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_7","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_8","score":0.5}
{"chain_id":"323Q6SJS8IFG0ERGLWT134OI24RHFZ_1_9","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_1","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_10","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_2","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_3","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_4","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_5","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_6","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_7","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_8","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKMAGDEN_1_9","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_1","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_10","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_2","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_3","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_4","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_5","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_6","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_7","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_8","score":0.5}
{"chain_id":"326O153BMIX7IKMI4PQ5U1OKO6IDEJ_1_9","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_1","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_10","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_2","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_3","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_4","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_5","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_6","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_7","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_8","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6UDUSR_1_9","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_1","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_10","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_2","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_3","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_4","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_5","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_6","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_7","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_8","score":0.5}
{"chain_id":"32AT8R96GL8U952MRF0ZTIWN6VLSUZ_1_9","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_1","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_10","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_2","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_3","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_4","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_5","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_6","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_7","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_8","score":0.5}
{"chain_id":"32EYX73OY08I8Q29CQ0U38RRQ54UR9_1_9","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_1","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_10","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_2","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_3","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_4","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_5","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_6","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_7","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_8","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA33HLM9C_1_9","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_1","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_10","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_2","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_3","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_4","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_5","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_6","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_7","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_8","score":0.5}
{"chain_id":"32KTQ2V7RDETRI1E979MLDA3HWE9M0_1_9","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_1","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_10","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_2","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_3","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_4","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_5","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_6","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_7","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_8","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9Q6WIGP_1_9","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_1","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_10","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_2","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_3","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_4","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_5","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_6","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_7","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_8","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9QJRIGA_1_9","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_1","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_10","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_2","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_3","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_4","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_5","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_6","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_7","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_8","score":0.5}
{"chain_id":"32M8BPYGATLMA9YV2YH4L1P9UZZGIG_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440084VA2_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440D3RAV7_1_9","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_1","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_10","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_2","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_3","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_4","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_5","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_6","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_7","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_8","score":0.5}
{"chain_id":"32N49TQG3GHQMO5SF5OD4440ZUSAVC_1_9","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_1","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_10","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_2","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_3","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_4","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_5","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_6","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_7","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_8","score":0.5}
{"chain_id":"32Q90QCQ1SKFWQSSW6CSYEJA8JKEKA_1_9","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_1","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_10","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_2","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_3","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_4","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_5","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_6","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_7","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_8","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT7KMS45_1_9","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_1","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_10","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_2","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_3","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_4","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_5","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_6","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_7","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_8","score":0.5}
{"chain_id":"32RIADZISS3VS787C99HGEYT8H94SZ_1_9","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_1","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_10","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_2","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_3","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_4","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_5","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_6","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_7","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_8","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH64J3AE_1_9","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_1","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_10","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_2","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_3","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_4","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_5","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_6","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_7","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_8","score":0.5}
{"chain_id":"32SVAV9L3F86AF39VVI7L9CH81NA3L_1_9","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_1","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_10","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_2","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_3","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_4","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_5","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_6","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_7","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_8","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6LKUBV9_1_9","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_1","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_10","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_2","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_3","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_4","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_5","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_6","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_7","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_8","score":0.5}
{"chain_id":"32UTUBMZ7GVV5YAKLJO9ZFO6MWJVB7_1_9","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_1","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_10","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_2","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_3","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_4","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_5","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_6","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_7","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_8","score":0.5}
{"chain_id":"32VNZTT0A73JRS8UXK9CQ4TXQM34R6_1_9","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_1","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_10","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_2","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_3","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_4","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_5","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_6","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_7","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_8","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PY8UM2Y_1_9","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_1","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_10","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_2","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_3","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_4","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_5","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_6","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_7","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_8","score":0.5}
{"chain_id":"32XVDSJFPZWIRYGFOYU7BQ2PYFAM2S_1_9","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_1","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_10","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_2","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_3","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_4","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_5","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_6","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_7","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_8","score":0.5}
{"chain_id":"3300DTYQT2G17TQN9BWPU0VJ4TBEQ6_1_9","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_1","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_10","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_2","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_3","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_4","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_5","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_6","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_7","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_8","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWA532YC_1_9","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_1","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_10","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_2","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_3","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_4","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_5","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_6","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_7","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_8","score":0.5}
{"chain_id":"336KAV9KYQRILF5T71II5LPWJQ62YU_1_9","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_1","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_10","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_2","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_3","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_4","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_5","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_6","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_7","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_8","score":0.5}
{"chain_id":"336YQZE83VDAQVZ26HW59X51FSCM57_1_9","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_1","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_10","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_2","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_3","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_4","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_5","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_6","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_7","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_8","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBS7OLV3_1_9","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_1","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_10","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_2","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_3","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_4","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_5","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_6","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_7","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_8","score":0.5}
{"chain_id":"337RC3OW0517I7WWCWD3DIKBU4BLVM_1_9","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_1","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_10","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_2","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_3","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_4","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_5","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_6","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_7","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_8","score":0.5}
{"chain_id":"338JKRMM26YL4NA8AG79P3DRPY6HAI_1_9","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_1","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_10","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_2","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_3","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_4","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_5","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_6","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_7","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_8","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2V9NKIA_1_9","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_1","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_10","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_2","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_3","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_4","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_5","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_6","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_7","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_8","score":0.5}
{"chain_id":"339ANSOTR51RCVUESP2JJTH2X1ZKI8_1_9","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_1","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_10","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_2","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_3","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_4","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_5","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_6","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_7","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_8","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS3ZP3L8_1_9","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_1","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_10","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_2","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_3","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_4","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_5","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_6","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_7","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_8","score":0.5}
{"chain_id":"33CID57104SN6YUDSM7XUNSS5AAL3Z_1_9","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_1","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_10","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_2","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_3","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_4","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_5","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_6","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_7","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_8","score":0.5}
{"chain_id":"33F859I566CQNXF0GU75KEXX9WFBHT_1_9","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_1","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_10","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_2","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_3","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_4","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_5","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_6","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_7","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_8","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8ALHC1M_1_9","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_1","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_10","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_2","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_3","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_4","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_5","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_6","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_7","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_8","score":0.5}
{"chain_id":"33FOTY3KEMKYTRMSS50F3BN8BO91CA_1_9","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_1","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_10","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_2","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_3","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_4","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_5","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_6","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_7","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_8","score":0.5}
{"chain_id":"33IZTU6J810MQ9WHWKBMDPVR127XSG_1_9","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_1","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_10","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_2","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_3","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_4","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_5","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_6","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_7","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_8","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N5VENMZ_1_9","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_1","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_10","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_2","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_3","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_4","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_5","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_6","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_7","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_8","score":0.5}
{"chain_id":"33JKGHPFYCTEGK58AHSR3E5N6EKNM8_1_9","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_1","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_10","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_2","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_3","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_4","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_5","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_6","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_7","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_8","score":0.5}
{"chain_id":"33L7PJKHCGXXQ88ODC4K7VAKC4V8TB_1_9","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_1","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_10","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_2","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_3","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_4","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_5","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_6","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_7","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_8","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNC2JI1TN_1_9","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_1","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_10","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_2","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_3","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_4","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_5","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_6","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_7","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_8","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCC3IT1T_1_9","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_1","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_10","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_2","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_3","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_4","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_5","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_6","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_7","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_8","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCXL91TD_1_9","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_1","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_10","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_2","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_3","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_4","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_5","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_6","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_7","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_8","score":0.5}
{"chain_id":"33LKR6A5KEJFF8O3ERV5SLNCY7CT1H_1_9","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_1","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_10","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_2","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_3","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_4","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_5","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_6","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_7","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_8","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVGNBXRQ_1_9","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_1","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_10","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_2","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_3","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_4","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_5","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_6","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_7","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_8","score":0.5}
{"chain_id":"33M4IA01QG0APUW4HVBHNFQVH0JXRP_1_9","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_1","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_10","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_2","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_3","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_4","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_5","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_6","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_7","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_8","score":0.5}
{"chain_id":"33OOO72IVHKZ2BY1UOKP9H636K2CTP_1_9","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_1","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_10","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_2","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_3","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_4","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_5","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_6","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_7","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_8","score":0.5}
{"chain_id":"33PPUNGG384ZUPWJIDZ2K066QXAZRC_1_9","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_1","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_10","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_2","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_3","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_4","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_5","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_6","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_7","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_8","score":0.5}
{"chain_id":"33SA9F9TRXT6RQM9LKVVMPD55P4WE7_1_9","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_1","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_10","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_2","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_3","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_4","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_5","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_6","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_7","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_8","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPGYY94_1_9","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_1","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_10","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_2","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_3","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_4","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_5","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_6","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_7","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_8","score":0.5}
{"chain_id":"33TIN5LC049TLZQSSAXXF8XAPL3Y9J_1_9","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_1","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_10","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_2","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_3","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_4","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_5","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_6","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_7","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_8","score":0.5}
{"chain_id":"345LHZDEDXRQPOH710ZYLAOBJL33UG_1_9","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_1","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_10","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_2","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_3","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_4","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_5","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_6","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_7","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_8","score":0.5}
{"chain_id":"3483FV8BEEIG2VVN1INQMU53AHI62V_1_9","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_1","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_10","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_2","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_3","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_4","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_5","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_6","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_7","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_8","score":0.5}
{"chain_id":"34BBWHLWHAAI7VOVH3LM74BXMY7IWJ_1_9","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_1","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_10","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_2","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_3","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_4","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_5","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_6","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_7","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_8","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8NKR5YG_1_9","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_1","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_10","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_2","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_3","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_4","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_5","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_6","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_7","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_8","score":0.5}
{"chain_id":"34FNN24DCM8AKCOGPKKG3SS8O01Y5G_1_9","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_1","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_10","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_2","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_3","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_4","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_5","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_6","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_7","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_8","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0CWOV47_1_9","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_1","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_10","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_2","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_3","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_4","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_5","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_6","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_7","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_8","score":0.5}
{"chain_id":"34HJIJKLP5VBKZPB64EMR1I0LPQ4VD_1_9","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_1","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_10","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_2","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_3","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_4","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_5","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_6","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_7","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_8","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBW5XQI8_1_9","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_1","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_10","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_2","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_3","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_4","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_5","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_6","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_7","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_8","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBWR4QIN_1_9","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_1","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_10","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_2","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_3","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_4","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_5","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_6","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_7","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_8","score":0.5}
{"chain_id":"34J10VATJFXDNYS95UMGFFTBZIFQIJ_1_9","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_1","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_10","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_2","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_3","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_4","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_5","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_6","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_7","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_8","score":0.5}
{"chain_id":"34MAJL3QP4MR8QNSMU8G86QHHEF433_1_9","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_1","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_10","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_2","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_3","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_4","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_5","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_6","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_7","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_8","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL721O10X_1_9","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_1","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_10","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_2","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_3","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_4","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_5","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_6","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_7","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_8","score":0.5}
{"chain_id":"34Q075JO1XCEZZRCGP7V8AL74G901D_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2JLNLH8_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2M6FHL5_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2Q9HLHL_1_9","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_1","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_10","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_2","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_3","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_4","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_5","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_6","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_7","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_8","score":0.5}
{"chain_id":"34S6N1K2ZVI2061C77WZYHT2ZJGHL9_1_9","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_1","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_10","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_2","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_3","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_4","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_5","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_6","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_7","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_8","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUHE4NYM_1_9","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_1","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_10","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_2","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_3","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_4","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_5","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_6","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_7","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_8","score":0.5}
{"chain_id":"34S9DKFK73OEA1UMTKG2X9SUL6PYN6_1_9","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_1","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_10","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_2","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_3","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_4","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_5","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_6","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_7","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_8","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64Y6GFC0Y_1_9","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_1","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_10","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_2","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_3","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_4","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_5","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_6","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_7","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_8","score":0.5}
{"chain_id":"34T446B1C0DYM21AWMWFP64YJNDC0N_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHBM496V_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC1G962_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHC6C695_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHGPL69K_1_9","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_1","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_10","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_2","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_3","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_4","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_5","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_6","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_7","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_8","score":0.5}
{"chain_id":"34V1S5K3GS0R2FGMMR25WHDHQ7U693_1_9","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_1","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_10","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_2","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_3","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_4","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_5","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_6","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_7","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_8","score":0.5}
{"chain_id":"34X6J5FLPTX9I9CFNC7GRG8B8NLJQH_1_9","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_1","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_10","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_2","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_3","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_4","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_5","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_6","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_7","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_8","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQAZMMGM_1_9","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_1","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_10","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_2","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_3","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_4","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_5","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_6","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_7","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_8","score":0.5}
{"chain_id":"34YB12FSQYN86SOMNDFWDUWQBL6GM9_1_9","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_1","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_10","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_2","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_3","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_4","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_5","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_6","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_7","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_8","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42RBY0T3_1_9","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_1","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_10","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_2","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_3","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_4","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_5","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_6","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_7","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_8","score":0.5}
{"chain_id":"34Z02EIMISCF8J3LI8R5EG42YZT0TH_1_9","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_1","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_10","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_2","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_3","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_4","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_5","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_6","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_7","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_8","score":0.5}
{"chain_id":"351SEKWQS0G5U8EVLNEO79TT73UMD2_1_9","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_1","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_10","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_2","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_3","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_4","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_5","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_6","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_7","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_8","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RREHE4H1_1_9","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_1","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_10","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_2","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_3","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_4","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_5","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_6","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_7","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_8","score":0.5}
{"chain_id":"352YTHGROVC62YUR1FXIC0RRY9X4HO_1_9","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_1","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_10","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_2","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_3","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_4","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_5","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_6","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_7","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_8","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52X11B00A_1_9","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_1","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_10","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_2","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_3","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_4","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_5","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_6","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_7","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_8","score":0.5}
{"chain_id":"354GIDR5ZB5ESA6UK63VU52XYFS00G_1_9","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_1","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_10","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_2","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_3","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_4","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_5","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_6","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_7","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_8","score":0.5}
{"chain_id":"354P56DE9K2SBUQQE77OJQI3Z8F7SG_1_9","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_1","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_10","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_2","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_3","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_4","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_5","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_6","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_7","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_8","score":0.5}
{"chain_id":"358010RM5ES2I1DLQFGROCFYEOXVXT_1_9","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_1","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_10","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_2","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_3","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_4","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_5","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_6","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_7","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_8","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7I0PR7Y_1_9","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_1","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_10","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_2","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_3","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_4","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_5","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_6","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_7","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_8","score":0.5}
{"chain_id":"358UUM7WRZ2GAFQDZI7JTGD7R7GR7C_1_9","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_1","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_10","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_2","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_3","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_4","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_5","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_6","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_7","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_8","score":0.5}
{"chain_id":"35BLDD71I6WRNWD0RX4CLXV998WVZI_1_9","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_1","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_10","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_2","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_3","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_4","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_5","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_6","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_7","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_8","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNF8OR3XG_1_9","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_1","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_10","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_2","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_3","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_4","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_5","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_6","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_7","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_8","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFAOCX3X_1_9","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_1","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_10","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_2","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_3","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_4","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_5","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_6","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_7","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_8","score":0.5}
{"chain_id":"35DR22AR5DJI6SB265AL5HNFDZJX3T_1_9","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_1","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_10","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_2","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_3","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_4","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_5","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_6","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_7","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_8","score":0.5}
{"chain_id":"35GCEFQ6I5N7BNBH7NY6223QJQT3ZO_1_9","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_1","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_10","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_2","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_3","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_4","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_5","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_6","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_7","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_8","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JO8WEOW_1_9","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_1","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_10","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_2","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_3","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_4","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_5","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_6","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_7","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_8","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JP44OE7_1_9","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_1","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_10","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_2","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_3","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_4","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_5","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_6","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_7","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_8","score":0.5}
{"chain_id":"35GMH2SV3EGYMHD7UY4L5V0JR2ZOE0_1_9","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_1","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_10","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_2","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_3","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_4","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_5","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_6","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_7","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_8","score":0.5}
{"chain_id":"35H6S234SAZ81SEAJ1POK18FSIC65F_1_9","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_1","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_10","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_2","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_3","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_4","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_5","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_6","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_7","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_8","score":0.5}
{"chain_id":"35K3O9HUABC4G40EVVLVI1R5W1GFEA_1_9","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_1","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_10","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_2","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_3","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_4","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_5","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_6","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_7","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_8","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PL37GHUV_1_9","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_1","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_10","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_2","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_3","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_4","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_5","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_6","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_7","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_8","score":0.5}
{"chain_id":"35L9RVQFCOH5JWO6GLO0P4PLYF0UH3_1_9","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_1","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_10","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_2","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_3","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_4","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_5","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_6","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_7","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_8","score":0.5}
{"chain_id":"3634BBTX0OTGW920REBM3GPXTX7FIE_1_9","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_1","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_10","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_2","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_3","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_4","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_5","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_6","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_7","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_8","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4DM6G68_1_9","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_1","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_10","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_2","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_3","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_4","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_5","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_6","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_7","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_8","score":0.5}
{"chain_id":"369J354OFD96HP3U0X8FOYZ4F0U6GG_1_9","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_1","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_10","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_2","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_3","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_4","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_5","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_6","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_7","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_8","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G03RPYD0_1_9","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_1","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_10","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_2","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_3","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_4","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_5","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_6","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_7","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_8","score":0.5}
{"chain_id":"36AHBNMV1RB5OP394Q2Z14G05VOYD9_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE1FPMWF_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIE347WMN_1_9","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_1","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_10","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_2","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_3","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_4","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_5","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_6","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_7","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_8","score":0.5}
{"chain_id":"36NEMU28XFC43EEM2IJEZXIEZPHWMZ_1_9","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_1","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_10","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_2","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_3","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_4","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_5","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_6","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_7","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_8","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE0FCAEQ_1_9","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_1","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_10","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_2","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_3","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_4","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_5","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_6","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_7","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_8","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE16UAER_1_9","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_1","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_10","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_2","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_3","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_4","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_5","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_6","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_7","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_8","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GE4OWAEW_1_9","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_1","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_10","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_2","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_3","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_4","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_5","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_6","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_7","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_8","score":0.5}
{"chain_id":"36PW28KO4ZV9KDJ6KFZ340GEZPIEAJ_1_9","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_1","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_10","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_2","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_3","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_4","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_5","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_6","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_7","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_8","score":0.5}
{"chain_id":"36TFCYNS449X00I1LQZN9BOPQNKHXZ_1_9","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_1","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_10","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_2","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_3","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_4","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_5","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_6","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_7","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_8","score":0.5}
{"chain_id":"36V4Q8R5ZKZZJHI0Q9K8780SHVJQMS_1_9","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_1","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_10","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_2","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_3","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_4","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_5","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_6","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_7","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_8","score":0.5}
{"chain_id":"36W0OB37HWDM5VIGM8N86W40Z5AHZP_1_9","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_1","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_10","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_2","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_3","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_4","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_5","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_6","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_7","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_8","score":0.5}
{"chain_id":"36WLNQG78Z9E3NOYQTZZZB0KIUCBEV_1_9","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_1","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_10","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_2","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_3","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_4","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_5","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_6","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_7","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_8","score":0.5}
{"chain_id":"36ZN444YTRXA2MFTQHUCQAYBI2HIOR_1_9","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_1","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_10","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_2","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_3","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_4","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_5","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_6","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_7","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_8","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P4MDRTI_1_9","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_1","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_10","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_2","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_3","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_4","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_5","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_6","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_7","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_8","score":0.5}
{"chain_id":"373ERPL3YO738DNKCLAKYC5P6GZTRW_1_9","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_1","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_10","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_2","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_3","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_4","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_5","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_6","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_7","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_8","score":0.5}
{"chain_id":"374TNBHA8BUZDY7E9C8J13NZNZGQYM_1_9","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_1","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_10","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_2","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_3","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_4","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_5","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_6","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_7","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_8","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6RPOIA9_1_9","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_1","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_10","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_2","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_3","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_4","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_5","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_6","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_7","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_8","score":0.5}
{"chain_id":"378XPAWRUCCL0ILSGYPUPFE6U2PAIV_1_9","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_1","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_10","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_2","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_3","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_4","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_5","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_6","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_7","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_8","score":0.5}
{"chain_id":"379J5II41OFQGWAAH6OTDEWPRXSEL2_1_9","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_1","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_10","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_2","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_3","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_4","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_5","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_6","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_7","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_8","score":0.5}
{"chain_id":"37C0GNLMHF2355T3Y777IDW74JJ6DZ_1_9","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_1","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_10","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_2","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_3","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_4","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_5","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_6","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_7","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_8","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCAY6IBK_1_9","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_1","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_10","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_2","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_3","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_4","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_5","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_6","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_7","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_8","score":0.5}
{"chain_id":"37FMASSAYCQQJSQKMCPQKQYCBDBIBK_1_9","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_1","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_10","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_2","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_3","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_4","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_5","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_6","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_7","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_8","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQHDPP0_1_9","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_1","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_10","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_2","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_3","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_4","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_5","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_6","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_7","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_8","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQI2PPR_1_9","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_1","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_10","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_2","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_3","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_4","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_5","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_6","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_7","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_8","score":0.5}
{"chain_id":"37KGEN7NJ3PK48EYICXBO74DQX8PPR_1_9","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_1","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_10","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_2","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_3","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_4","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_5","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_6","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_7","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_8","score":0.5}
{"chain_id":"37M28K1J0QCHVT5YYGAU1GT65T9AJO_1_9","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_1","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_10","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_2","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_3","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_4","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_5","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_6","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_7","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_8","score":0.5}
{"chain_id":"37Q970SNZE7E08BOPRQFIGRQ18LS1N_1_9","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_1","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_10","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_2","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_3","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_4","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_5","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_6","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_7","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_8","score":0.5}
{"chain_id":"37QW5D2ZRGLWB8V9OCZUXQEYDG2S8K_1_9","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_1","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_10","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_2","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_3","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_4","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_5","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_6","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_7","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_8","score":0.5}
{"chain_id":"37TD41K0AH8YNBLM5OFZZCAH4WTSCB_1_9","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_1","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_10","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_2","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_3","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_4","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_5","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_6","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_7","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_8","score":0.5}
{"chain_id":"37TRT2X24QQME3AQ4UAQWRDCML7BJG_1_9","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_1","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_10","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_2","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_3","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_4","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_5","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_6","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_7","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_8","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92MU18RL_1_9","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_1","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_10","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_2","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_3","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_4","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_5","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_6","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_7","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_8","score":0.5}
{"chain_id":"37U1UTWH9VLKATVW9NZP7G92OXR8RJ_1_9","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_1","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_10","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_2","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_3","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_4","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_5","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_6","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_7","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_8","score":0.5}
{"chain_id":"37UEWGM5HT72ZTBBA2QAS6MUF53R1X_1_9","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_1","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_10","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_2","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_3","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_4","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_5","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_6","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_7","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_8","score":0.5}
{"chain_id":"37UQDCYH6XU83M7U82CTUD2A0D17VH_1_9","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_1","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_10","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_2","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_3","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_4","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_5","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_6","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_7","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_8","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9D2YWR_1_9","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_1","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_10","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_2","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_3","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_4","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_5","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_6","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_7","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_8","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5L9KVWYW_1_9","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_1","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_10","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_2","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_3","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_4","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_5","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_6","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_7","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_8","score":0.5}
{"chain_id":"37W3JXSD6674XV30LL7PTS5LE5OWY0_1_9","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_1","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_10","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_2","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_3","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_4","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_5","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_6","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_7","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_8","score":0.5}
{"chain_id":"37WLF8U1WPPBJBZDQOTUMQRXQZN6KZ_1_9","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_1","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_10","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_2","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_3","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_4","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_5","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_6","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_7","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_8","score":0.5}
{"chain_id":"37XITHEISW8MMWL9QZFU925LEIKRC4_1_9","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_1","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_10","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_2","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_3","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_4","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_5","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_6","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_7","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_8","score":0.5}
{"chain_id":"37ZHEEHM6WLORD5BOS6NBIAR7TK37Q_1_9","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_1","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_10","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_2","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_3","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_4","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_5","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_6","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_7","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_8","score":0.5}
{"chain_id":"382M9COHEHETZMX4QKGU41S84MEUE1_1_9","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_1","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_10","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_2","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_3","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_4","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_5","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_6","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_7","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_8","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8G5TDS0H_1_9","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_1","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_10","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_2","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_3","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_4","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_5","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_6","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_7","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_8","score":0.5}
{"chain_id":"384PI804XS0ETJQ6T8MF4B8GQ76S0N_1_9","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_1","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_10","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_2","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_3","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_4","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_5","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_6","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_7","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_8","score":0.5}
{"chain_id":"386PBUZZXFWYRCW2V7ZKAU9HW8OLJ0_1_9","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_1","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_10","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_2","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_3","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_4","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_5","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_6","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_7","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_8","score":0.5}
{"chain_id":"38BQUHLA9WZWY55KBWS4I35SY1SOMJ_1_9","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_1","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_10","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_2","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_3","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_4","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_5","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_6","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_7","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_8","score":0.5}
{"chain_id":"38F5OAUN5NB3LLCA3DVPFCB1RLDH7X_1_9","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_1","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_10","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_2","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_3","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_4","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_5","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_6","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_7","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_8","score":0.5}
{"chain_id":"38JBBYETQO9UIO3PBEPCRXUELCHE4B_1_9","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_1","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_10","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_2","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_3","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_4","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_5","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_6","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_7","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_8","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LS9L7W6M_1_9","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_1","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_10","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_2","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_3","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_4","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_5","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_6","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_7","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_8","score":0.5}
{"chain_id":"38YMOXR4MUY2EBTUF2CXA1LSA25W6J_1_9","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_1","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_10","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_2","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_3","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_4","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_5","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_6","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_7","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_8","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK1822PV_1_9","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_1","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_10","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_2","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_3","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_4","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_5","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_6","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_7","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_8","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK2GHP2E_1_9","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_1","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_10","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_2","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_3","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_4","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_5","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_6","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_7","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_8","score":0.5}
{"chain_id":"3907X2AHF04OC1DSGEMZSUWK3JG2PX_1_9","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_1","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_10","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_2","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_3","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_4","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_5","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_6","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_7","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_8","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RC4NVEXL_1_9","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_1","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_10","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_2","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_3","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_4","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_5","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_6","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_7","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_8","score":0.5}
{"chain_id":"39ASUFLU6X6LGQRZVPRHO8RCYCYEXW_1_9","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_1","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_10","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_2","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_3","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_4","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_5","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_6","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_7","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_8","score":0.5}
{"chain_id":"39DD6S19JPAALLREW7F2LT7N8JFZED_1_9","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_1","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_10","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_2","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_3","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_4","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_5","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_6","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_7","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_8","score":0.5}
{"chain_id":"39GAF6DQWRZUS0SSJMVKT3BXADX1VV_1_9","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_1","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_10","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_2","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_3","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_4","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_5","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_6","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_7","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_8","score":0.5}
{"chain_id":"39GHHAVOMFQ2T4PHPF03OD76QJ2J4O_1_9","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_1","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_10","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_2","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_3","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_4","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_5","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_6","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_7","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_8","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY596Z8VD_1_9","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_1","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_10","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_2","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_3","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_4","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_5","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_6","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_7","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_8","score":0.5}
{"chain_id":"39GXDJN2OTDC30CDI74Z8DY5AKNV8H_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO0E6CV8_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUO2CMVC5_1_9","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_1","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_10","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_2","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_3","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_4","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_5","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_6","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_7","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_8","score":0.5}
{"chain_id":"39JEC7537U0EF32QZJK4AZUOYZCCVI_1_9","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_1","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_10","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_2","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_3","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_4","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_5","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_6","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_7","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_8","score":0.5}
{"chain_id":"39L1G8WVWQQAGRQ9ZCPEA8JE5TA31S_1_9","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_1","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_10","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_2","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_3","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_4","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_5","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_6","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_7","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_8","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ00WJIUY_1_9","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_1","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_10","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_2","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_3","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_4","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_5","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_6","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_7","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_8","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ02YBUI8_1_9","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_1","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_10","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_2","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_3","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_4","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_5","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_6","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_7","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_8","score":0.5}
{"chain_id":"39LNWE0K4UV5FRZQM36LPGQ0D5SUIE_1_9","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_1","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_10","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_2","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_3","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_4","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_5","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_6","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_7","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_8","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MGCC38V_1_9","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_1","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_10","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_2","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_3","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_4","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_5","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_6","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_7","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_8","score":0.5}
{"chain_id":"39LOEL67OS4SRRAUYXYTPI6MKJR83X_1_9","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_1","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_10","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_2","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_3","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_4","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_5","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_6","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_7","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_8","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y5BX9P7_1_9","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_1","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_10","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_2","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_3","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_4","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_5","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_6","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_7","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_8","score":0.5}
{"chain_id":"39N5ACM9HEMZCLYR1N1E2H4Y6UD9PQ_1_9","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_1","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_10","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_2","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_3","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_4","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_5","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_6","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_7","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_8","score":0.5}
{"chain_id":"39OWYR0EPKQ2M9H5VWXPP4JLFPMYFY_1_9","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_1","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_10","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_2","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_3","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_4","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_5","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_6","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_7","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_8","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1BITVJ_1_9","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_1","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_10","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_2","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_3","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_4","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_5","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_6","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_7","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_8","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9US1OATV1_1_9","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_1","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_10","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_2","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_3","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_4","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_5","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_6","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_7","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_8","score":0.5}
{"chain_id":"39PAAFCODMZV1K41L5FUZ9USOW3VTZ_1_9","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_1","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_10","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_2","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_3","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_4","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_5","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_6","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_7","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_8","score":0.5}
{"chain_id":"39RP059MEHSCFBGB7RNICJ5T4QHBMZ_1_9","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_1","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_10","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_2","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_3","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_4","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_5","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_6","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_7","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_8","score":0.5}
{"chain_id":"39U1BHVTDLQBPB2I1V9OGE29XUXT3N_1_9","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_1","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_10","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_2","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_3","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_4","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_5","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_6","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_7","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_8","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50COSUJV_1_9","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_1","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_10","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_2","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_3","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_4","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_5","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_6","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_7","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_8","score":0.5}
{"chain_id":"39ZSFO5CA8V1A2JW4LRL1H50CT6UJJ_1_9","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_1","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_10","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_2","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_3","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_4","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_5","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_6","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_7","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_8","score":0.5}
{"chain_id":"3A0EX8ZRN8NC9S5PQUBT6ES0070YBI_1_9","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_1","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_10","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_2","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_3","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_4","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_5","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_6","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_7","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_8","score":0.5}
{"chain_id":"3A1COHJ8NJU7LZHTDINVTC7WZAG8HF_1_9","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_1","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_10","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_2","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_3","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_4","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_5","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_6","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_7","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_8","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOS5WH1Z_1_9","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_1","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_10","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_2","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_3","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_4","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_5","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_6","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_7","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_8","score":0.5}
{"chain_id":"3A1PQ49WVHGPAZX8EXEGE2KOSV31H6_1_9","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_1","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_10","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_2","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_3","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_4","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_5","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_6","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_7","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_8","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1Y2X2MLG_1_9","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_1","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_10","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_2","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_3","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_4","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_5","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_6","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_7","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_8","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YG13ML3_1_9","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_1","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_10","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_2","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_3","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_4","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_5","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_6","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_7","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_8","score":0.5}
{"chain_id":"3A4NIXBJ76YOSK2NY4CCQM1YHNHLMP_1_9","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_1","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_10","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_2","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_3","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_4","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_5","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_6","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_7","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_8","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3F5DAHC7_1_9","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_1","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_10","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_2","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_3","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_4","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_5","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_6","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_7","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_8","score":0.5}
{"chain_id":"3A4TN5196KH9X276UU30VY3FW1LHCL_1_9","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_1","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_10","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_2","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_3","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_4","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_5","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_6","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_7","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_8","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO445CCXJ4_1_9","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_1","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_10","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_2","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_3","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_4","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_5","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_6","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_7","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_8","score":0.5}
{"chain_id":"3A7Y0R2P2ONTR6DR9Q28LO44JUOJXG_1_9","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_1","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_10","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_2","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_3","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_4","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_5","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_6","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_7","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_8","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JHBRTHV_1_9","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_1","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_10","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_2","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_3","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_4","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_5","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_6","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_7","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_8","score":0.5}
{"chain_id":"3AAPLD8UCCGQJJ9HGGYQK83JJ73TH1_1_9","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_1","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_10","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_2","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_3","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_4","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_5","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_6","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_7","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_8","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZICYMPNS_1_9","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_1","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_10","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_2","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_3","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_4","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_5","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_6","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_7","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_8","score":0.5}
{"chain_id":"3AMW0RGHOD1K1N2L2XKJKIZIH3DNPW_1_9","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_1","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_10","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_2","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_3","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_4","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_5","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_6","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_7","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_8","score":0.5}
{"chain_id":"3AMYWKA6YBLU92KSOGLNT78651TO6M_1_9","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_1","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_10","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_2","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_3","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_4","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_5","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_6","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_7","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_8","score":0.5}
{"chain_id":"3AQF3RZ558H03P7ZPD2X6DZST9WF6J_1_9","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_1","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_10","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_2","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_3","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_4","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_5","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_6","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_7","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_8","score":0.5}
{"chain_id":"3ATPCQ38J897QI0XKGBXB38UI03AY9_1_9","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_1","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_10","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_2","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_3","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_4","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_5","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_6","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_7","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_8","score":0.5}
{"chain_id":"3ATTHHXXWANXWVTLR8H89NP4XT3IXQ_1_9","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_1","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_10","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_2","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_3","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_4","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_5","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_6","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_7","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_8","score":0.5}
{"chain_id":"3AUQQEL7U5SULB7AN3RKFYSNQX7V0B_1_9","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_1","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_10","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_2","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_3","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_4","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_5","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_6","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_7","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_8","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685F4GKIZF_1_9","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_1","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_10","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_2","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_3","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_4","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_5","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_6","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_7","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_8","score":0.5}
{"chain_id":"3AWETUDC92RM1QT0SQ5T685FZGOIZE_1_9","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_1","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_10","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_2","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_3","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_4","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_5","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_6","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_7","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_8","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87XCTB67_1_9","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_1","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_10","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_2","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_3","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_4","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_5","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_6","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_7","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_8","score":0.5}
{"chain_id":"3B2X28YI3WEAQ8VJKBG1NN87Y2O6BE_1_9","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_1","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_10","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_2","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_3","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_4","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_5","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_6","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_7","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_8","score":0.5}
{"chain_id":"3B3WTRP3DB13KEXLZPQ264W5YN892Z_1_9","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_1","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_10","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_2","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_3","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_4","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_5","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_6","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_7","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_8","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7930LLRSD_1_9","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_1","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_10","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_2","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_3","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_4","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_5","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_6","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_7","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_8","score":0.5}
{"chain_id":"3B837J3LDOV2TDA5NL5UO7933D2RSH_1_9","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_1","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_10","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_2","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_3","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_4","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_5","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_6","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_7","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_8","score":0.5}
{"chain_id":"3BDCF01OGXTOM1R1H70NKHO5PAJLYS_1_9","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_1","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_10","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_2","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_3","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_4","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_5","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_6","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_7","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_8","score":0.5}
{"chain_id":"3BGYGHDBBXJSKUCMEGBNWHORO09229_1_9","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_1","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_10","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_2","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_3","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_4","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_5","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_6","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_7","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_8","score":0.5}
{"chain_id":"3BQU611VFPJEKYIKKY5HGR4J3IH993_1_9","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_1","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_10","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_2","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_3","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_4","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_5","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_6","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_7","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_8","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1JVO6AT_1_9","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_1","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_10","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_2","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_3","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_4","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_5","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_6","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_7","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_8","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1K6ZA6V_1_9","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_1","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_10","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_2","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_3","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_4","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_5","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_6","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_7","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_8","score":0.5}
{"chain_id":"3BV8HQ2ZZW057YQREXG5SCO1NC7A6I_1_9","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_1","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_10","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_2","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_3","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_4","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_5","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_6","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_7","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_8","score":0.5}
{"chain_id":"3BWI6RSP7G8R1BL8DCNJU9EOD46E7B_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHFNRUMC_1_9","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_1","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_10","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_2","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_3","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_4","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_5","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_6","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_7","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_8","score":0.5}
{"chain_id":"3BXQMRHWKZXRBAPH7I4DH9XHTD8UMN_1_9","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_1","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_10","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_2","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_3","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_4","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_5","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_6","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_7","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_8","score":0.5}
{"chain_id":"3C2NJ6JBKAGO9G1F0Z97O5RP6JMN2Q_1_9","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_1","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_10","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_2","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_3","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_4","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_5","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_6","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_7","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_8","score":0.5}
{"chain_id":"3C44YUNSI1OBFBB8D36GODNOZNZDP0_1_9","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_1","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_10","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_2","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_3","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_4","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_5","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_6","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_7","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_8","score":0.5}
{"chain_id":"3C6FJU71TQSR5REVQLSOB4KOR2AUYK_1_9","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_1","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_10","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_2","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_3","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_4","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_5","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_6","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_7","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_8","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWFF7MZC_1_9","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_1","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_10","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_2","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_3","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_4","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_5","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_6","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_7","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_8","score":0.5}
{"chain_id":"3C8HJ7UOP7T8X9JRD53LY1CWGSRZM0_1_9","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_1","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_10","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_2","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_3","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_4","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_5","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_6","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_7","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_8","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHFRV95H_1_9","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_1","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_10","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_2","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_3","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_4","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_5","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_6","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_7","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_8","score":0.5}
{"chain_id":"3CCZ6YKWR7IVJBG8H8S04BZHVZO956_1_9","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_1","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_10","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_2","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_3","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_4","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_5","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_6","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_7","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_8","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCG4G8I7K_1_9","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_1","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_10","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_2","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_3","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_4","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_5","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_6","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_7","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_8","score":0.5}
{"chain_id":"3CFJTT4SXTP3HGNU9VDAFOCGQHAI7A_1_9","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_1","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_10","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_2","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_3","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_4","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_5","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_6","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_7","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_8","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CAYS6LV_1_9","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_1","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_10","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_2","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_3","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_4","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_5","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_6","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_7","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_8","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CCVDL6R_1_9","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_1","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_10","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_2","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_3","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_4","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_5","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_6","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_7","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_8","score":0.5}
{"chain_id":"3CFVK00FWLKM3HHVBO5V1Q4CD706LO_1_9","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_1","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_10","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_2","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_3","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_4","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_5","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_6","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_7","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_8","score":0.5}
{"chain_id":"3CN4LGXD5XNSOTKGBF16Y0MUPYL4YL_1_9","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_1","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_10","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_2","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_3","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_4","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_5","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_6","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_7","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_8","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEFB5KPJ_1_9","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_1","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_10","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_2","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_3","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_4","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_5","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_6","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_7","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_8","score":0.5}
{"chain_id":"3COPXFW7XBBJTHHI5KS3SQIEHMVPK2_1_9","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_1","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_10","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_2","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_3","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_4","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_5","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_6","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_7","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_8","score":0.5}
{"chain_id":"3CP1TO84PT0KJRV9WZDLUOR3F8U257_1_9","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_1","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_10","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_2","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_3","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_4","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_5","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_6","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_7","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_8","score":0.5}
{"chain_id":"3CPLWGV3MOYZ90MEL8OMYSZ35T39NC_1_9","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_1","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_10","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_2","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_3","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_4","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_5","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_6","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_7","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_8","score":0.5}
{"chain_id":"3D3VGR7TA0EY9WPQX64TGZ1RA5H3RU_1_9","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_1","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_10","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_2","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_3","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_4","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_5","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_6","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_7","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_8","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q64IE9G3_1_9","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_1","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_10","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_2","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_3","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_4","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_5","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_6","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_7","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_8","score":0.5}
{"chain_id":"3D4CH1LGEASTZ85SY4BR88Q66Y8G92_1_9","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_1","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_10","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_2","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_3","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_4","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_5","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_6","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_7","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_8","score":0.5}
{"chain_id":"3DBQWDE4Y6XG8DK2IIB5MCU1M6U5NC_1_9","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_1","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_10","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_2","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_3","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_4","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_5","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_6","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_7","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_8","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSH29XYT_1_9","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_1","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_10","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_2","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_3","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_4","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_5","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_6","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_7","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_8","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSIEZXY8_1_9","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_1","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_10","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_2","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_3","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_4","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_5","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_6","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_7","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_8","score":0.5}
{"chain_id":"3DEL4X4EL6KJMV8QVQ3BVBJSKNPXYI_1_9","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_1","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_10","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_2","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_3","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_4","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_5","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_6","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_7","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_8","score":0.5}
{"chain_id":"3DH6GAKTYYO8RQ85W8RWSWZ3QV5ZYD_1_9","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_1","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_10","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_2","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_3","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_4","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_5","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_6","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_7","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_8","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS0RCG2M_1_9","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_1","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_10","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_2","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_3","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_4","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_5","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_6","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_7","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_8","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLIS31IG2F_1_9","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_1","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_10","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_2","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_3","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_4","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_5","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_6","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_7","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_8","score":0.5}
{"chain_id":"3DHE4R9OCWAIZOKTDVWCYLISEYO2GC_1_9","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_1","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_10","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_2","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_3","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_4","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_5","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_6","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_7","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_8","score":0.5}
{"chain_id":"3DI28L7YXADDQP66OW6ATZNBAFRE1T_1_9","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_1","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_10","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_2","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_3","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_4","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_5","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_6","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_7","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_8","score":0.5}
{"chain_id":"3DL65MZB8DEXDSG44TVUAV624SKEC7_1_9","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_1","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_10","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_2","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_3","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_4","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_5","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_6","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_7","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_8","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921RHV46B_1_9","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_1","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_10","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_2","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_3","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_4","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_5","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_6","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_7","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_8","score":0.5}
{"chain_id":"3DPNQGW4LLEQ59AA5W6EF921UV346E_1_9","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_1","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_10","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_2","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_3","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_4","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_5","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_6","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_7","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_8","score":0.5}
{"chain_id":"3DQQ64TANGKAOHBZUYB6G1C99SOPWL_1_9","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_1","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_10","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_2","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_3","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_4","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_5","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_6","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_7","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_8","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JJKVSE_1_9","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_1","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_10","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_2","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_3","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_4","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_5","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_6","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_7","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_8","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6JSFSVO_1_9","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_1","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_10","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_2","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_3","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_4","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_5","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_6","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_7","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_8","score":0.5}
{"chain_id":"3DUZQ9U6SMNUMKRMW5A4P4Y6MF2VSR_1_9","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_1","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_10","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_2","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_3","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_4","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_5","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_6","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_7","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_8","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUF8GA555_1_9","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_1","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_10","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_2","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_3","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_4","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_5","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_6","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_7","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_8","score":0.5}
{"chain_id":"3DY46V3X3PHCIOEWEIFJSVUFV8W55Y_1_9","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_1","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_10","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_2","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_3","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_4","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_5","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_6","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_7","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_8","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA1RXVRA_1_9","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_1","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_10","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_2","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_3","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_4","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_5","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_6","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_7","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_8","score":0.5}
{"chain_id":"3DY4FPOOA1NIL5R9HGAZZUTA4IPRVJ_1_9","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_1","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_10","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_2","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_3","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_4","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_5","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_6","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_7","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_8","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8B3YN8IQ_1_9","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_1","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_10","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_2","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_3","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_4","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_5","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_6","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_7","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_8","score":0.5}
{"chain_id":"3E1QT0TDFP87HUSDJ05GTO8BFQM8IL_1_9","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_1","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_10","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_2","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_3","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_4","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_5","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_6","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_7","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_8","score":0.5}
{"chain_id":"3E337GFOL97EOA9MZEUWQ9LQG34GNH_1_9","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_1","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_10","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_2","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_3","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_4","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_5","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_6","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_7","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_8","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4URKTICF_1_9","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_1","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_10","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_2","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_3","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_4","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_5","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_6","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_7","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_8","score":0.5}
{"chain_id":"3E47SOBEYQV9TXIQ0CLLVA4UTNYICS_1_9","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_1","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_10","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_2","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_3","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_4","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_5","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_6","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_7","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_8","score":0.5}
{"chain_id":"3E4GGUZ1T8QN1AGC3MHRWDAL5VGK2O_1_9","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_1","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_10","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_2","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_3","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_4","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_5","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_6","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_7","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_8","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97E7D9Y_1_9","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_1","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_10","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_2","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_3","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_4","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_5","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_6","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_7","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_8","score":0.5}
{"chain_id":"3E7TUJ2EGCLQNOV1WEAJ2NN97SUD9D_1_9","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_1","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_10","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_2","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_3","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_4","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_5","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_6","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_7","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_8","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGCWTIP_1_9","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_1","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_10","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_2","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_3","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_4","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_5","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_6","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_7","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_8","score":0.5}
{"chain_id":"3EA3QWIZ4IUQFEK1MYGBKK4YGF2ITQ_1_9","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_1","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_10","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_2","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_3","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_4","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_5","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_6","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_7","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_8","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF034JQ1JL_1_9","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_1","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_10","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_2","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_3","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_4","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_5","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_6","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_7","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_8","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03EE81J3_1_9","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_1","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_10","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_2","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_3","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_4","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_5","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_6","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_7","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_8","score":0.5}
{"chain_id":"3EF8EXOTT1UL15SY2XH1QF03Y09J1E_1_9","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_1","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_10","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_2","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_3","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_4","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_5","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_6","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_7","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_8","score":0.5}
{"chain_id":"3EFVCAY5L383C5CJ1IQG5PNBF7KJ88_1_9","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_1","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_10","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_2","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_3","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_4","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_5","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_6","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_7","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_8","score":0.5}
{"chain_id":"3EG49X351UBH3BYKP78JCKZZNRTX69_1_9","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_1","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_10","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_2","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_3","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_4","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_5","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_6","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_7","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_8","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FA58IZ67_1_9","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_1","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_10","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_2","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_3","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_4","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_5","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_6","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_7","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_8","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FASHG6ZH_1_9","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_1","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_10","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_2","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_3","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_4","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_5","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_6","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_7","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_8","score":0.5}
{"chain_id":"3EJPLAJKEMF686YZQPW495FAVR86ZW_1_9","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_1","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_10","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_2","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_3","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_4","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_5","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_6","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_7","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_8","score":0.5}
{"chain_id":"3EO896NRAWUMAP474G1BWLJY76QTJE_1_9","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_1","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_10","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_2","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_3","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_4","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_5","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_6","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_7","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_8","score":0.5}
{"chain_id":"3EQHHY4HQSRAYL3GVEYAWSL4M9M5GD_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZE9J9KH_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZEAC9KC_1_9","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_1","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_10","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_2","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_3","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_4","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_5","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_6","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_7","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_8","score":0.5}
{"chain_id":"3ERET4BTVM8Y1U1BOVW660IZF9I9KH_1_9","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_1","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_10","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_2","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_3","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_4","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_5","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_6","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_7","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_8","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64GZPY7B_1_9","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_1","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_10","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_2","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_3","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_4","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_5","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_6","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_7","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_8","score":0.5}
{"chain_id":"3F0BG9B9MPMP7G2ZDDZD1C64K6YY72_1_9","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_1","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_10","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_2","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_3","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_4","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_5","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_6","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_7","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_8","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8BMR9Q1_1_9","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_1","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_10","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_2","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_3","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_4","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_5","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_6","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_7","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_8","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8CKDQ91_1_9","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_1","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_10","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_2","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_3","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_4","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_5","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_6","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_7","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_8","score":0.5}
{"chain_id":"3F1567XTNW4KCXZ610G32MU8QNYQ96_1_9","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_1","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_10","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_2","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_3","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_4","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_5","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_6","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_7","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_8","score":0.5}
{"chain_id":"3FDJT1UU747F07ZZL5JPUKDXTC25KN_1_9","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_1","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_10","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_2","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_3","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_4","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_5","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_6","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_7","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_8","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZN07KOPV_1_9","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_1","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_10","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_2","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_3","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_4","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_5","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_6","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_7","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_8","score":0.5}
{"chain_id":"3FE2ERCCZX72J82X3CKWZ7ZNGAUPOS_1_9","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_1","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_10","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_2","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_3","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_4","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_5","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_6","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_7","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_8","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMU74J2QI_1_9","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_1","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_10","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_2","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_3","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_4","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_5","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_6","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_7","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_8","score":0.5}
{"chain_id":"3FE7TXL1LIM9CDE7GR1OSZMUB4X2Q0_1_9","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_1","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_10","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_2","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_3","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_4","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_5","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_6","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_7","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_8","score":0.5}
{"chain_id":"3FIJLY1B6U38DVP44916CDQ9BHDPFJ_1_9","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_1","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_10","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_2","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_3","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_4","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_5","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_6","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_7","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_8","score":0.5}
{"chain_id":"3FK0YFF9PZFAEC8QQ0F90RIDNSXVVH_1_9","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_1","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_10","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_2","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_3","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_4","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_5","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_6","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_7","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_8","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSNP3V3_1_9","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_1","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_10","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_2","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_3","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_4","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_5","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_6","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_7","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_8","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSOVV33_1_9","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_1","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_10","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_2","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_3","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_4","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_5","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_6","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_7","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_8","score":0.5}
{"chain_id":"3FPRZHYEPY6Q23676Q93HWQUSPW3VE_1_9","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_1","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_10","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_2","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_3","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_4","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_5","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_6","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_7","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_8","score":0.5}
{"chain_id":"3FQ5JJ512LNJQW55P5FBO1DJJM1NK7_1_9","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_1","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_10","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_2","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_3","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_4","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_5","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_6","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_7","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_8","score":0.5}
{"chain_id":"3FTF2T8WLRHPWUVSD9F9UBCU1ZB9WY_1_9","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_1","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_10","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_2","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_3","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_4","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_5","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_6","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_7","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_8","score":0.5}
{"chain_id":"3FTOP5WARFNLUG7G6ED1CAHT0V0J0P_1_9","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_1","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_10","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_2","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_3","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_4","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_5","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_6","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_7","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_8","score":0.5}
{"chain_id":"3G0WWMR1UVJ51Z302AZ8KNPSI2MNQC_1_9","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_1","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_10","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_2","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_3","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_4","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_5","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_6","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_7","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_8","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBFGB76F_1_9","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_1","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_10","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_2","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_3","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_4","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_5","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_6","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_7","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_8","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBHBU76Q_1_9","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_1","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_10","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_2","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_3","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_4","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_5","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_6","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_7","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_8","score":0.5}
{"chain_id":"3G2UL9A02DDNOWST7U4LILMBW6E67E_1_9","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_1","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_10","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_2","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_3","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_4","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_5","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_6","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_7","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_8","score":0.5}
{"chain_id":"3G5F9DBFOPW5WBD6LBY5LQR4H2ZVHK_1_9","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_1","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_10","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_2","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_3","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_4","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_5","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_6","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_7","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_8","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMI1H30_1_9","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_1","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_10","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_2","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_3","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_4","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_5","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_6","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_7","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_8","score":0.5}
{"chain_id":"3GA6AFUKOONLK2BN11SZ2VZLMKMH3P_1_9","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_1","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_10","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_2","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_3","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_4","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_5","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_6","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_7","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_8","score":0.5}
{"chain_id":"3GDTJDAPVUATDDI44F38LHFSK728M1_1_9","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_1","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_10","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_2","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_3","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_4","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_5","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_6","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_7","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_8","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SE735WL_1_9","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_1","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_10","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_2","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_3","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_4","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_5","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_6","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_7","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_8","score":0.5}
{"chain_id":"3GFK2QRXX9G6V7TGAJ2SIV7SSUMW5J_1_9","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_1","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_10","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_2","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_3","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_4","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_5","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_6","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_7","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_8","score":0.5}
{"chain_id":"3GGAI1SQEVXVPG8HLRJDN3BB9YJCM6_1_9","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_1","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_10","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_2","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_3","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_4","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_5","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_6","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_7","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_8","score":0.5}
{"chain_id":"3GM6G9ZBKNWCBXAS7DE3CDBFUFHMTK_1_9","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_1","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_10","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_2","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_3","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_4","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_5","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_6","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_7","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_8","score":0.5}
{"chain_id":"3GNA64GUZE31BAXUYA3MQ6P64X9Q5L_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIBUNPAC_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIDVDAPR_1_9","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_1","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_10","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_2","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_3","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_4","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_5","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_6","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_7","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_8","score":0.5}
{"chain_id":"3GNCZX450IMDH48WTTFEYCFIHBZPAS_1_9","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_1","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_10","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_2","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_3","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_4","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_5","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_6","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_7","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_8","score":0.5}
{"chain_id":"3GS6S824SQWALWZMEJYECC7E4D2WN6_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOA1QPBK_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVBGBP1_1_9","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_1","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_10","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_2","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_3","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_4","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_5","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_6","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_7","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_8","score":0.5}
{"chain_id":"3GU1KF0O4I0I0EDOZ7FATNZOVFXBPQ_1_9","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_1","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_10","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_2","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_3","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_4","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_5","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_6","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_7","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_8","score":0.5}
{"chain_id":"3H7XDTSHKCQ4B4I0RFLJMSYH6Z4GW5_1_9","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_1","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_10","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_2","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_3","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_4","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_5","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_6","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_7","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_8","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVT8X0LPP_1_9","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_1","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_10","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_2","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_3","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_4","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_5","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_6","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_7","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_8","score":0.5}
{"chain_id":"3H7Z272LX76UDNZ0QK447QVTCYVLPQ_1_9","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_1","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_10","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_2","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_3","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_4","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_5","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_6","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_7","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_8","score":0.5}
{"chain_id":"3H8DHMCCW9AA4KES0B18SW1P6R0DKZ_1_9","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_1","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_10","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_2","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_3","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_4","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_5","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_6","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_7","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_8","score":0.5}
{"chain_id":"3HFNH7HEMHDZR7MEF6MDU3GVI3JGQ7_1_9","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_1","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_10","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_2","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_3","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_4","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_5","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_6","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_7","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_8","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8BSF9OS_1_9","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_1","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_10","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_2","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_3","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_4","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_5","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_6","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_7","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_8","score":0.5}
{"chain_id":"3HHRAGRYX84HRLMQMNOZDHZ8WNNO9Q_1_9","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_1","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_10","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_2","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_3","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_4","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_5","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_6","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_7","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_8","score":0.5}
{"chain_id":"3HL8HNGX450NL89XNK59QNQU53J9FB_1_9","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_1","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_10","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_2","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_3","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_4","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_5","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_6","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_7","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_8","score":0.5}
{"chain_id":"3HMIGG0U4L5T7U7INIRJFC2ZW208YK_1_9","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_1","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_10","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_2","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_3","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_4","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_5","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_6","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_7","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_8","score":0.5}
{"chain_id":"3HMVI3QICJRBWUNXOXI402FRD0R1YT_1_9","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_1","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_10","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_2","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_3","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_4","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_5","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_6","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_7","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_8","score":0.5}
{"chain_id":"3HOSI13XHZN2QE8I8UFLOJ6ZLL0DDO_1_9","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_1","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_10","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_2","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_3","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_4","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_5","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_6","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_7","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_8","score":0.5}
{"chain_id":"3HQUKB7LNFD0EAIMGMH16P0VI1DHH6_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A01JM04_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3JBM0Z_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29A3LXM0P_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AXZK0MC_1_9","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_1","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_10","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_2","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_3","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_4","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_5","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_6","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_7","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_8","score":0.5}
{"chain_id":"3HRMW88U16PBVOD19BQTS29AYI40MZ_1_9","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_1","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_10","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_2","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_3","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_4","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_5","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_6","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_7","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_8","score":0.5}
{"chain_id":"3HWRJOOET51DK9501FLUP0AKJPRSEH_1_9","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_1","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_10","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_2","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_3","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_4","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_5","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_6","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_7","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_8","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGX102F9_1_9","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_1","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_10","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_2","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_3","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_4","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_5","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_6","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_7","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_8","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZ5VF2R_1_9","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_1","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_10","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_2","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_3","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_4","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_5","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_6","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_7","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_8","score":0.5}
{"chain_id":"3HYA4D452RICLOOY2BQUG0IGZU52F2_1_9","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_1","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_10","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_2","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_3","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_4","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_5","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_6","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_7","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_8","score":0.5}
{"chain_id":"3I02618YA05XWDMUZYW5YDRCODAUPQ_1_9","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_1","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_10","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_2","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_3","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_4","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_5","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_6","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_7","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_8","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHASO0YY_1_9","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_1","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_10","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_2","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_3","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_4","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_5","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_6","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_7","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_8","score":0.5}
{"chain_id":"3I0BTBYZAXKBP52FSEE4MXWHBB3Y0E_1_9","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_1","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_10","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_2","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_3","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_4","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_5","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_6","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_7","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_8","score":0.5}
{"chain_id":"3I2PTA7R3TT4TTIX5X7SSV8OSXZKQ6_1_9","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_1","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_10","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_2","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_3","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_4","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_5","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_6","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_7","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_8","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P3NO2A4_1_9","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_1","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_10","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_2","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_3","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_4","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_5","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_6","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_7","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_8","score":0.5}
{"chain_id":"3I33IC7ZWF1HPX7QRV422Z7P4QE2A1_1_9","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_1","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_10","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_2","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_3","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_4","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_5","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_6","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_7","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_8","score":0.5}
{"chain_id":"3I3WADAZ9Q3YQYKEJXBI11U6DUC5OX_1_9","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_1","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_10","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_2","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_3","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_4","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_5","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_6","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_7","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_8","score":0.5}
{"chain_id":"3IAEQB9FMEJ1ZK89PPKBG7VKSKRDWK_1_9","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_1","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_10","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_2","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_3","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_4","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_5","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_6","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_7","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_8","score":0.5}
{"chain_id":"3IAS3U3I0FFM87CIX94YXARVL9J2BH_1_9","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_1","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_10","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_2","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_3","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_4","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_5","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_6","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_7","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_8","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CENFISA_1_9","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_1","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_10","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_2","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_3","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_4","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_5","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_6","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_7","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_8","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CR51IS9_1_9","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_1","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_10","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_2","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_3","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_4","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_5","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_6","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_7","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_8","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRLYIS2_1_9","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_1","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_10","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_2","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_3","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_4","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_5","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_6","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_7","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_8","score":0.5}
{"chain_id":"3IFS6Q0HJIIP0E7MXYBLAF8CRT1ISL_1_9","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_1","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_10","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_2","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_3","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_4","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_5","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_6","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_7","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_8","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS3M9NOF_1_9","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_1","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_10","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_2","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_3","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_4","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_5","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_6","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_7","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_8","score":0.5}
{"chain_id":"3IGI0VL647J2GNQKNX74VIUS4CKON8_1_9","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_1","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_10","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_2","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_3","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_4","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_5","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_6","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_7","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_8","score":0.5}
{"chain_id":"3IHR8NYAM70YFFSFKS5NL9TIOGH4P3_1_9","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_1","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_10","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_2","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_3","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_4","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_5","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_6","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_7","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_8","score":0.5}
{"chain_id":"3II4UPYCOJ6WFNCNDGL5OIDY4WPQDY_1_9","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_1","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_10","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_2","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_3","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_4","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_5","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_6","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_7","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_8","score":0.5}
{"chain_id":"3IJXV6UZ1XIDZZ79I9BGK53GSP9RIG_1_9","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_1","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_10","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_2","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_3","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_4","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_5","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_6","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_7","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_8","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLE9BSFN9_1_9","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_1","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_10","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_2","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_3","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_4","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_5","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_6","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_7","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_8","score":0.5}
{"chain_id":"3IKZ72A5B4F8AADROUOE8OLECZWFNS_1_9","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_1","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_10","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_2","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_3","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_4","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_5","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_6","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_7","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_8","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6U8S68W_1_9","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_1","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_10","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_2","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_3","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_4","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_5","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_6","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_7","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_8","score":0.5}
{"chain_id":"3IO1LGZLK9WROAXCHZWRWLI6XPT860_1_9","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_1","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_10","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_2","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_3","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_4","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_5","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_6","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_7","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_8","score":0.5}
{"chain_id":"3IOEN3P9S7I9DADRIENCHBVYC1F61Z_1_9","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_1","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_10","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_2","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_3","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_4","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_5","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_6","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_7","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_8","score":0.5}
{"chain_id":"3IQ1VMJRYTJSPHSPC4JHCMF38UM9AS_1_9","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_1","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_10","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_2","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_3","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_4","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_5","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_6","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_7","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_8","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCNSBJS6_1_9","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_1","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_10","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_2","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_3","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_4","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_5","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_6","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_7","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_8","score":0.5}
{"chain_id":"3ITXP059PWILVPRSFGWLMGNCTXKJSV_1_9","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_1","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_10","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_2","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_3","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_4","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_5","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_6","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_7","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_8","score":0.5}
{"chain_id":"3IUZPWIU1O69DQEJH66YKKQACBBKWR_1_9","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_1","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_10","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_2","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_3","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_4","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_5","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_6","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_7","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_8","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG1RGRJG_1_9","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_1","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_10","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_2","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_3","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_4","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_5","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_6","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_7","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_8","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG7V4JRA_1_9","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_1","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_10","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_2","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_3","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_4","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_5","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_6","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_7","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_8","score":0.5}
{"chain_id":"3IX2EGZR7BI9UARF0RWGW6IG8JRRJI_1_9","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_1","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_10","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_2","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_3","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_4","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_5","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_6","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_7","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_8","score":0.5}
{"chain_id":"3IXEICO792IAMUP0KX7MNHET7NC6TN_1_9","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_1","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_10","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_2","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_3","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_4","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_5","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_6","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_7","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_8","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QI6B90_1_9","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_1","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_10","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_2","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_3","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_4","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_5","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_6","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_7","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_8","score":0.5}
{"chain_id":"3IXQG4FA2TXX8RXHIIJD7XZ9QLJ9BH_1_9","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_1","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_10","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_2","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_3","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_4","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_5","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_6","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_7","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_8","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CO8K4068_1_9","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_1","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_10","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_2","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_3","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_4","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_5","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_6","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_7","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_8","score":0.5}
{"chain_id":"3J2UYBXQQLB96LS9MVJC36CONOU06L_1_9","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_1","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_10","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_2","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_3","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_4","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_5","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_6","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_7","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_8","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JCDIQWX_1_9","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_1","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_10","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_2","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_3","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_4","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_5","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_6","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_7","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_8","score":0.5}
{"chain_id":"3J4Q2Z4UTY2VOTCEUBQVG62JEPPQWU_1_9","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_1","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_10","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_2","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_3","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_4","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_5","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_6","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_7","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_8","score":0.5}
{"chain_id":"3J88R45B2GXPDHGP437SIKQGJHHPXC_1_9","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_1","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_10","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_2","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_3","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_4","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_5","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_6","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_7","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_8","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE3409LM_1_9","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_1","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_10","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_2","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_3","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_4","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_5","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_6","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_7","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_8","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VE6G9L9Y_1_9","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_1","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_10","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_2","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_3","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_4","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_5","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_6","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_7","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_8","score":0.5}
{"chain_id":"3JAOYWH7VI39L0JT9V87L0VEGKT9LO_1_9","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_1","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_10","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_2","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_3","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_4","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_5","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_6","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_7","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_8","score":0.5}
{"chain_id":"3JBT3HLQF81EICG45LVDF56R0WLZPS_1_9","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_1","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_10","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_2","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_3","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_4","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_5","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_6","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_7","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_8","score":0.5}
{"chain_id":"3JC6VJ2SABI9OUQD9E6006A10SA5A6_1_9","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_1","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_10","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_2","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_3","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_4","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_5","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_6","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_7","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_8","score":0.5}
{"chain_id":"3JCG6DTRV3PQ4WGSI2RPOHKXNGIQQH_1_9","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_1","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_10","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_2","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_3","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_4","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_5","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_6","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_7","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_8","score":0.5}
{"chain_id":"3JJVG1YBEBWE74V5FS6WVHU78VN5BP_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURI33VER_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURLWJEVF_1_9","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_1","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_10","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_2","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_3","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_4","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_5","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_6","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_7","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_8","score":0.5}
{"chain_id":"3JMSRU9HQITTC1M4VAQZ0NURZK2EVO_1_9","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_1","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_10","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_2","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_3","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_4","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_5","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_6","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_7","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_8","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0AHI2LU_1_9","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_1","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_10","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_2","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_3","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_4","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_5","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_6","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_7","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_8","score":0.5}
{"chain_id":"3JNQLM5FT4LYLGYUOMTSBDN0ZRGL2K_1_9","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_1","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_10","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_2","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_3","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_4","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_5","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_6","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_7","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_8","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANFXPNAL_1_9","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_1","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_10","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_2","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_3","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_4","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_5","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_6","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_7","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_8","score":0.5}
{"chain_id":"3JPSL1DZ5SYDEJWJDE2MUNANG6PANR_1_9","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_1","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_10","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_2","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_3","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_4","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_5","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_6","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_7","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_8","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS287H13E5_1_9","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_1","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_10","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_2","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_3","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_4","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_5","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_6","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_7","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_8","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28TNX3EZ_1_9","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_1","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_10","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_2","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_3","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_4","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_5","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_6","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_7","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_8","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U3Z3EY_1_9","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_1","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_10","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_2","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_3","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_4","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_5","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_6","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_7","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_8","score":0.5}
{"chain_id":"3JRJSWSMQHK90SKLB0KTHS28U8P3EY_1_9","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_1","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_10","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_2","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_3","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_4","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_5","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_6","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_7","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_8","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y5GOS_1_9","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_1","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_10","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_2","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_3","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_4","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_5","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_6","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_7","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_8","score":0.5}
{"chain_id":"3JV9LGBJWTDW6V9Y0TU95YLV0Y8GOV_1_9","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_1","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_10","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_2","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_3","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_4","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_5","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_6","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_7","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_8","score":0.5}
{"chain_id":"3JW0YLFXRTF08Q806KP18VK5YEBWWH_1_9","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_1","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_10","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_2","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_3","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_4","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_5","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_6","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_7","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_8","score":0.5}
{"chain_id":"3JWH6J9I9SCIXT1BJS2IPYUTV6ABNK_1_9","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_1","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_10","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_2","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_3","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_4","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_5","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_6","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_7","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_8","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHBNIFGS_1_9","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_1","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_10","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_2","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_3","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_4","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_5","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_6","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_7","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_8","score":0.5}
{"chain_id":"3JZQSN0I3Q920IW51QBJI4CHO8RFGK_1_9","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_1","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_10","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_2","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_3","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_4","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_5","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_6","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_7","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_8","score":0.5}
{"chain_id":"3K2755HG5S2ZOYMEZ0ABCJ9KX0EFD3_1_9","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_1","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_10","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_2","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_3","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_4","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_5","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_6","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_7","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_8","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP0U9T_1_9","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_1","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_10","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_2","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_3","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_4","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_5","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_6","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_7","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_8","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDP19U9_1_9","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_1","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_10","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_2","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_3","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_4","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_5","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_6","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_7","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_8","score":0.5}
{"chain_id":"3K3R2QNK8B2C4Q6NI908CNRXDSC9UQ_1_9","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_1","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_10","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_2","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_3","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_4","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_5","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_6","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_7","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_8","score":0.5}
{"chain_id":"3K5TEWLKGVA5S6OQRTGQL2SJGAPVID_1_9","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_1","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_10","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_2","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_3","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_4","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_5","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_6","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_7","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_8","score":0.5}
{"chain_id":"3K772S5NP8AOU0RKQL9VLM3IFH3EHM_1_9","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_1","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_10","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_2","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_3","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_4","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_5","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_6","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_7","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_8","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QY67BLNH_1_9","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_1","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_10","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_2","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_3","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_4","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_5","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_6","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_7","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_8","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYBBNNL8_1_9","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_1","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_10","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_2","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_3","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_4","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_5","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_6","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_7","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_8","score":0.5}
{"chain_id":"3K9FOBBF2HIUA2NNA5RC31QYCTZNLL_1_9","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_1","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_10","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_2","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_3","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_4","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_5","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_6","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_7","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_8","score":0.5}
{"chain_id":"3KAKFY4PGU1LGXM77JAK2700MBF3IA_1_9","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_1","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_10","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_2","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_3","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_4","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_5","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_6","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_7","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_8","score":0.5}
{"chain_id":"3KB8R4ZV1E6CN1KPWOPNZELWSR5BGL_1_9","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_1","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_10","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_2","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_3","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_4","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_5","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_6","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_7","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_8","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJEP1U2V_1_9","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_1","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_10","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_2","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_3","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_4","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_5","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_6","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_7","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_8","score":0.5}
{"chain_id":"3KGTPGBS6XK146LOX0LT20JJNLOU2J_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDO62DKOZ_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOD2WOKT_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMCKOI_1_9","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_1","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_10","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_2","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_3","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_4","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_5","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_6","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_7","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_8","score":0.5}
{"chain_id":"3KIBXJ1WD5T18H5HQKFO3QDOMMOKOU_1_9","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_1","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_10","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_2","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_3","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_4","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_5","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_6","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_7","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_8","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37J3C2VJX_1_9","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_1","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_10","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_2","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_3","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_4","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_5","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_6","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_7","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_8","score":0.5}
{"chain_id":"3KJYX6QCM9A1NH8W9B1QX37JRH6JVN_1_9","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_1","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_10","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_2","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_3","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_4","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_5","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_6","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_7","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_8","score":0.5}
{"chain_id":"3KKG4CDWKIXDNSC8339QZJT3LHB49W_1_9","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_1","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_10","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_2","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_3","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_4","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_5","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_6","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_7","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_8","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AEW4FKG_1_9","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_1","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_10","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_2","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_3","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_4","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_5","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_6","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_7","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_8","score":0.5}
{"chain_id":"3KMS4QQVK2P724SORHWYGW4AI3YFKS_1_9","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_1","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_10","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_2","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_3","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_4","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_5","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_6","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_7","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_8","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTVMQSM7_1_9","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_1","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_10","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_2","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_3","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_4","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_5","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_6","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_7","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_8","score":0.5}
{"chain_id":"3KRVW3HTZNKBWXXDID9D28FTWEIMSE_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAX843RMH_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXBS8MRW_1_9","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_1","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_10","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_2","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_3","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_4","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_5","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_6","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_7","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_8","score":0.5}
{"chain_id":"3KV0LJBBH2KZVIX03O98CYAXC7SRMG_1_9","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_1","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_10","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_2","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_3","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_4","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_5","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_6","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_7","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_8","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCD2R5LL_1_9","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_1","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_10","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_2","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_3","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_4","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_5","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_6","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_7","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_8","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCFLAL5O_1_9","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_1","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_10","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_2","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_3","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_4","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_5","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_6","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_7","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_8","score":0.5}
{"chain_id":"3KWTYT08702QKDHH65VQ9KQCKOLL5A_1_9","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_1","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_10","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_2","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_3","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_4","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_5","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_6","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_7","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_8","score":0.5}
{"chain_id":"3KXIR214I4F2N8R9IUUBPWLPCM542R_1_9","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_1","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_10","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_2","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_3","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_4","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_5","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_6","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_7","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_8","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYMRSODY_1_9","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_1","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_10","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_2","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_3","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_4","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_5","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_6","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_7","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_8","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPHDDOR_1_9","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_1","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_10","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_2","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_3","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_4","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_5","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_6","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_7","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_8","score":0.5}
{"chain_id":"3KYQYYSHYV6TUBZ7Y3T1ZDIYPI4DOK_1_9","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_1","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_10","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_2","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_3","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_4","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_5","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_6","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_7","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_8","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW320416SY9_1_9","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_1","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_10","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_2","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_3","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_4","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_5","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_6","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_7","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_8","score":0.5}
{"chain_id":"3L0KT67Y8EFBQEMR1GXOW3205AJYSB_1_9","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_1","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_10","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_2","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_3","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_4","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_5","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_6","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_7","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_8","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHL8HUNJ_1_9","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_1","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_10","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_2","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_3","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_4","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_5","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_6","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_7","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_8","score":0.5}
{"chain_id":"3L2IS5HSFAHXTSAHJJJDUOMHYOWUN7_1_9","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_1","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_10","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_2","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_3","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_4","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_5","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_6","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_7","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_8","score":0.5}
{"chain_id":"3L4D84MILZRW5GDC4MKMI2GAHLGHJG_1_9","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_1","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_10","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_2","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_3","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_4","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_5","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_6","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_7","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_8","score":0.5}
{"chain_id":"3L4PIM1GQTFZPZMEMRXJ6TX4HY6YR7_1_9","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_1","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_10","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_2","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_3","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_4","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_5","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_6","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_7","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_8","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O18N254T_1_9","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_1","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_10","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_2","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_3","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_4","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_5","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_6","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_7","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_8","score":0.5}
{"chain_id":"3L6L49WXW0WUM5AW0DW9N3O1WXK45I_1_9","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_1","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_10","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_2","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_3","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_4","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_5","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_6","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_7","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_8","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT08ODAN_1_9","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_1","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_10","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_2","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_3","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_4","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_5","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_6","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_7","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_8","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYT0ILDA4_1_9","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_1","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_10","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_2","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_3","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_4","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_5","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_6","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_7","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_8","score":0.5}
{"chain_id":"3L70J4KAZGL4S756OKOJYIYTZEEDAO_1_9","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_1","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_10","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_2","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_3","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_4","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_5","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_6","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_7","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_8","score":0.5}
{"chain_id":"3LEIZ60CDJYTQP0XOWZGTF6CM719ZQ_1_9","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_1","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_10","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_2","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_3","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_4","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_5","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_6","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_7","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_8","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZGYSBDD_1_9","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_1","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_10","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_2","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_3","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_4","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_5","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_6","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_7","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_8","score":0.5}
{"chain_id":"3LEP4MGT3GZ9BHAYUYOFTTIZIO5BD8_1_9","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_1","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_10","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_2","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_3","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_4","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_5","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_6","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_7","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_8","score":0.5}
{"chain_id":"3LJ7UR74RHCYCUG24DSVHKONL0KN4P_1_9","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_1","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_10","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_2","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_3","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_4","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_5","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_6","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_7","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_8","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17J0UZOWP_1_9","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_1","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_10","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_2","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_3","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_4","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_5","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_6","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_7","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_8","score":0.5}
{"chain_id":"3LKC68YZ3A2S3HZYFYYRR17JAULWOT_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17IQGLH_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH17KIGLD_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1BA4LGO_1_9","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_1","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_10","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_2","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_3","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_4","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_5","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_6","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_7","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_8","score":0.5}
{"chain_id":"3LO69W1SU3CO0A61N1EHDHH1KWYGLU_1_9","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_1","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_10","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_2","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_3","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_4","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_5","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_6","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_7","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_8","score":0.5}
{"chain_id":"3LOTDFNYA7YYX4M5GVF147Y54FGFW6_1_9","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_1","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_10","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_2","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_3","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_4","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_5","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_6","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_7","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_8","score":0.5}
{"chain_id":"3LOZAJ85YDCTLAFJ25WGM7IN388X2D_1_9","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_1","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_10","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_2","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_3","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_4","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_5","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_6","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_7","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_8","score":0.5}
{"chain_id":"3LRLIPTPEQ8C6DBGG1A62VTJI2OKAE_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX5DHQOJ_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX6OQQOF_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX73AQOU_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX79KOQE_1_9","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_1","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_10","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_2","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_3","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_4","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_5","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_6","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_7","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_8","score":0.5}
{"chain_id":"3LS2AMNW5FPNJK3C3PZLZCPX7BDOQB_1_9","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_1","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_10","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_2","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_3","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_4","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_5","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_6","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_7","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_8","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJKZRFQ4_1_9","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_1","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_10","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_2","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_3","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_4","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_5","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_6","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_7","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_8","score":0.5}
{"chain_id":"3LWJHTCVCCLTD7QJ4MGVCIGJTD9FQN_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP4P8BKD_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DP5LZBKX_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPFLOKB5_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPGPBKB1_1_9","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_1","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_10","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_2","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_3","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_4","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_5","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_6","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_7","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_8","score":0.5}
{"chain_id":"3LYA37P8IQMHPNG8MFA2X6DPY8GKBQ_1_9","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_1","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_10","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_2","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_3","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_4","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_5","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_6","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_7","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_8","score":0.5}
{"chain_id":"3M0BCWMB8VV8KNAPBTT5LH7KSOWBW9_1_9","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_1","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_10","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_2","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_3","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_4","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_5","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_6","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_7","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_8","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVDMZ5S_1_9","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_1","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_10","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_2","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_3","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_4","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_5","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_6","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_7","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_8","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AVRTZ5R_1_9","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_1","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_10","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_2","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_3","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_4","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_5","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_6","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_7","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_8","score":0.5}
{"chain_id":"3M0NZ3JDP1XAP2YLL6MZHZ1AWL95Z2_1_9","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_1","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_10","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_2","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_3","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_4","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_5","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_6","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_7","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_8","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI748X6S4_1_9","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_1","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_10","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_2","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_3","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_4","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_5","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_6","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_7","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_8","score":0.5}
{"chain_id":"3M23Y66PO2697UCLVX2WDCI7JJ9S63_1_9","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_1","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_10","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_2","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_3","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_4","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_5","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_6","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_7","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_8","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBJJFR6T_1_9","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_1","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_10","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_2","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_3","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_4","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_5","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_6","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_7","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_8","score":0.5}
{"chain_id":"3M68NM076H6X6FC8G82RN2DBKXMR6T_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY2IVQBO_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPY4MUQBX_1_9","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_1","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_10","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_2","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_3","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_4","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_5","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_6","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_7","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_8","score":0.5}
{"chain_id":"3M81GAB8A0I30QE3ZKUZTSPYY9TBQL_1_9","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_1","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_10","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_2","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_3","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_4","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_5","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_6","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_7","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_8","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF69XNL_1_9","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_1","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_10","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_2","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_3","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_4","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_5","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_6","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_7","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_8","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLF9SXNA_1_9","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_1","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_10","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_2","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_3","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_4","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_5","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_6","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_7","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_8","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLFRMNXU_1_9","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_1","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_10","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_2","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_3","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_4","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_5","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_6","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_7","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_8","score":0.5}
{"chain_id":"3MAOD8E57Q9PAW4COOU0EVLLS0ANXD_1_9","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_1","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_10","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_2","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_3","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_4","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_5","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_6","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_7","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_8","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99UD8KKLI_1_9","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_1","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_10","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_2","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_3","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_4","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_5","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_6","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_7","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_8","score":0.5}
{"chain_id":"3MB8LZR5BFST2W2KDSZWB99USK2KL3_1_9","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_1","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_10","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_2","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_3","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_4","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_5","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_6","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_7","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_8","score":0.5}
{"chain_id":"3MD9PLUKKIDEFR4RP6ILBG1W01AZNQ_1_9","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_1","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_10","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_2","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_3","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_4","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_5","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_6","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_7","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_8","score":0.5}
{"chain_id":"3MHW492WW0CROPEHC8EIDVZ0R02MV5_1_9","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_1","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_10","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_2","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_3","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_4","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_5","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_6","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_7","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_8","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H1U63MF_1_9","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_1","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_10","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_2","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_3","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_4","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_5","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_6","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_7","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_8","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H2HUM3X_1_9","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_1","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_10","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_2","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_3","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_4","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_5","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_6","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_7","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_8","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H54XM3D_1_9","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_1","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_10","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_2","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_3","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_4","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_5","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_6","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_7","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_8","score":0.5}
{"chain_id":"3MMN5BL1WZ37CGCYSY8CU07H57P3MS_1_9","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_1","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_10","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_2","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_3","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_4","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_5","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_6","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_7","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_8","score":0.5}
{"chain_id":"3MRNMEIQW55LOQWALBD97WE4YJPDL9_1_9","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_1","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_10","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_2","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_3","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_4","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_5","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_6","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_7","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_8","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE7Y9WAE_1_9","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_1","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_10","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_2","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_3","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_4","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_5","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_6","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_7","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_8","score":0.5}
{"chain_id":"3MTMREQS4VH31D5X5FT9Q6NE87BWAZ_1_9","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_1","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_10","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_2","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_3","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_4","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_5","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_6","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_7","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_8","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZQ835X9_1_9","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_1","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_10","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_2","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_3","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_4","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_5","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_6","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_7","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_8","score":0.5}
{"chain_id":"3MX2NQ3YC9TLK7Y6KOYEKELZSATX5X_1_9","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_1","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_10","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_2","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_3","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_4","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_5","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_6","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_7","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_8","score":0.5}
{"chain_id":"3N1FSUEFL5ZPQIPPFJESLFCTATYD4A_1_9","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_1","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_10","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_2","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_3","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_4","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_5","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_6","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_7","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_8","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMR5MH8_1_9","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_1","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_10","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_2","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_3","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_4","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_5","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_6","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_7","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_8","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RMTXMH4_1_9","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_1","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_10","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_2","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_3","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_4","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_5","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_6","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_7","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_8","score":0.5}
{"chain_id":"3N2BF7Y2VQTM6OJX7JXEYU8RNR6MHA_1_9","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_1","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_10","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_2","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_3","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_4","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_5","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_6","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_7","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_8","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DRDZKUR_1_9","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_1","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_10","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_2","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_3","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_4","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_5","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_6","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_7","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_8","score":0.5}
{"chain_id":"3N4BPTXIO8RWLMPEM6RX2W5DS47UKS_1_9","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_1","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_10","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_2","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_3","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_4","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_5","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_6","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_7","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_8","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UX64COOM_1_9","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_1","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_10","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_2","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_3","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_4","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_5","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_6","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_7","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_8","score":0.5}
{"chain_id":"3N8OEVH1FRPKY8QWOC7HA7UXTVHOOW_1_9","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_1","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_10","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_2","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_3","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_4","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_5","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_6","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_7","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_8","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z0PF72X_1_9","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_1","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_10","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_2","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_3","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_4","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_5","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_6","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_7","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_8","score":0.5}
{"chain_id":"3NAPMVF0ZWEZ6V9SKSSIS85Z5UR27J_1_9","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_1","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_10","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_2","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_3","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_4","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_5","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_6","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_7","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_8","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ0ZQFOA_1_9","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_1","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_10","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_2","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_3","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_4","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_5","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_6","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_7","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_8","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ11JFO8_1_9","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_1","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_10","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_2","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_3","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_4","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_5","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_6","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_7","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_8","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJ2KUFOM_1_9","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_1","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_10","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_2","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_3","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_4","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_5","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_6","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_7","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_8","score":0.5}
{"chain_id":"3NC5L260MOLQSVD3P9ORNDLJY00FOK_1_9","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_1","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_10","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_2","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_3","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_4","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_5","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_6","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_7","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_8","score":0.5}
{"chain_id":"3ND9UOO81K1KXWW126IZZK3P2NNLWR_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9N2V8P8O_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NMM48P5_1_9","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_1","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_10","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_2","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_3","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_4","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_5","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_6","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_7","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_8","score":0.5}
{"chain_id":"3NG53N1RLVIZYGFHWVV02L9NPKXP8E_1_9","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_1","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_10","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_2","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_3","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_4","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_5","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_6","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_7","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_8","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG_1_9","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_1","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_10","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_2","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_3","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_4","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_5","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_6","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_7","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_8","score":0.5}
{"chain_id":"3NGI5ARFTT4HNGVWXAMLNBMFAWMP1O_1_9","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_1","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_10","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_2","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_3","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_4","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_5","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_6","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_7","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_8","score":0.5}
{"chain_id":"3NGMS9VZTLHWMI0AQ6510JC591TFFP_1_9","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_1","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_10","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_2","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_3","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_4","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_5","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_6","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_7","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_8","score":0.5}
{"chain_id":"3NJM2BJS4W51AJ5UD7B54756IXDCPD_1_9","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_1","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_10","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_2","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_3","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_4","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_5","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_6","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_7","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_8","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3BJ8QLL_1_9","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_1","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_10","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_2","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_3","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_4","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_5","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_6","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_7","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_8","score":0.5}
{"chain_id":"3NLZY2D53POFDZ0FQXJT7VL3Q79QLD_1_9","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_1","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_10","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_2","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_3","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_4","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_5","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_6","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_7","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_8","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOGEAXG6_1_9","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_1","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_10","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_2","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_3","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_4","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_5","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_6","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_7","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_8","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOH4MXGZ_1_9","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_1","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_10","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_2","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_3","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_4","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_5","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_6","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_7","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_8","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCOIQNGXS_1_9","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_1","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_10","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_2","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_3","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_4","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_5","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_6","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_7","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_8","score":0.5}
{"chain_id":"3NPFYT4IZC3J04NQ1KH5OBCORTGXGH_1_9","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_1","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_10","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_2","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_3","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_4","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_5","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_6","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_7","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_8","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A3NBPTC_1_9","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_1","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_10","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_2","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_3","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_4","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_5","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_6","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_7","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_8","score":0.5}
{"chain_id":"3NPI0JQDAO4IW075ZT6VTH5A51DTPC_1_9","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_1","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_10","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_2","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_3","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_4","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_5","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_6","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_7","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_8","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM71NTYVB_1_9","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_1","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_10","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_2","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_3","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_4","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_5","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_6","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_7","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_8","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM74CNVYJ_1_9","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_1","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_10","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_2","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_3","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_4","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_5","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_6","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_7","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_8","score":0.5}
{"chain_id":"3NQL1CS15R7RI63VVB2T7QM76ULVYJ_1_9","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_1","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_10","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_2","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_3","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_4","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_5","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_6","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_7","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_8","score":0.5}
{"chain_id":"3NS0A6KXC4785ZN5225QLWSZJO9ZGL_1_9","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_1","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_10","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_2","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_3","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_4","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_5","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_6","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_7","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_8","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD7L379K_1_9","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_1","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_10","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_2","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_3","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_4","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_5","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_6","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_7","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_8","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD89D797_1_9","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_1","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_10","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_2","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_3","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_4","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_5","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_6","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_7","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_8","score":0.5}
{"chain_id":"3NXNZ5RS1AWA6FUR517X2VDD8RL79F_1_9","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_1","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_10","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_2","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_3","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_4","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_5","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_6","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_7","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_8","score":0.5}
{"chain_id":"3O6CYIULED0NGMJVBKA0Q7Z8HBIUWS_1_9","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_1","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_10","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_2","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_3","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_4","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_5","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_6","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_7","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_8","score":0.5}
{"chain_id":"3O7L7BFSHEOOQV24W3RGLY4XGBBIEB_1_9","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_1","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_10","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_2","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_3","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_4","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_5","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_6","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_7","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_8","score":0.5}
{"chain_id":"3OCHAWUVGOJO2QJ9RB2KM34HOLUKXG_1_9","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_1","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_10","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_2","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_3","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_4","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_5","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_6","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_7","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_8","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHM4F9QUD_1_9","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_1","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_10","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_2","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_3","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_4","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_5","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_6","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_7","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_8","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMNPRUQ2_1_9","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_1","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_10","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_2","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_3","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_4","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_5","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_6","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_7","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_8","score":0.5}
{"chain_id":"3OE22WJIGINIWPN9ZBBUYIHMO7UUQ6_1_9","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_1","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_10","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_2","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_3","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_4","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_5","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_6","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_7","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_8","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS5SPAOO_1_9","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_1","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_10","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_2","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_3","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_4","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_5","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_6","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_7","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_8","score":0.5}
{"chain_id":"3OHYZ19UGC4VW4WVET2Z9CAS8E0AOA_1_9","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_1","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_10","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_2","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_3","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_4","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_5","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_6","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_7","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_8","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1ECWLAFY_1_9","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_1","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_10","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_2","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_3","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_4","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_5","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_6","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_7","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_8","score":0.5}
{"chain_id":"3OLF68YTN901QRJ2FQJ9MI1EF9LAFR_1_9","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_1","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_10","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_2","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_3","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_4","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_5","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_6","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_7","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_8","score":0.5}
{"chain_id":"3OLQQLKKNSOKL6MAELCGXZJXQK2JEB_1_9","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_1","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_10","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_2","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_3","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_4","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_5","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_6","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_7","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_8","score":0.5}
{"chain_id":"3ON104KXQKVOZOPGWEJID31EIX4W4F_1_9","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_1","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_10","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_2","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_3","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_4","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_5","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_6","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_7","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_8","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYL9QOB9_1_9","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_1","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_10","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_2","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_3","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_4","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_5","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_6","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_7","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_8","score":0.5}
{"chain_id":"3OONKJ5DKCI0FE1NK72V4NUYZO5BOJ_1_9","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_1","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_10","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_2","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_3","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_4","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_5","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_6","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_7","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_8","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E6LFV67_1_9","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_1","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_10","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_2","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_3","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_4","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_5","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_6","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_7","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_8","score":0.5}
{"chain_id":"3OS46CRSLFYPZMTPPS71OZ2E8056V4_1_9","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_1","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_10","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_2","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_3","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_4","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_5","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_6","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_7","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_8","score":0.5}
{"chain_id":"3OS4RQUCR9E691OUL4J5HTLKTIPFBO_1_9","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_1","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_10","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_2","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_3","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_4","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_5","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_6","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_7","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_8","score":0.5}
{"chain_id":"3OSWBBLG1EWGOKDZZUGXIQ9R4BCDX9_1_9","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_1","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_10","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_2","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_3","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_4","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_5","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_6","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_7","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_8","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL96359V50P7_1_9","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_1","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_10","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_2","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_3","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_4","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_5","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_6","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_7","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_8","score":0.5}
{"chain_id":"3OUYGIZWR7XHGRAE1RIL9635IT0P0W_1_9","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_1","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_10","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_2","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_3","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_4","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_5","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_6","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_7","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_8","score":0.5}
{"chain_id":"3OVHNO1VE605TFDE0C4IFBP26UOZD2_1_9","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_1","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_10","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_2","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_3","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_4","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_5","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_6","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_7","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_8","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJS4HD4QP_1_9","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_1","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_10","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_2","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_3","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_4","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_5","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_6","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_7","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_8","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSODF4Q3_1_9","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_1","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_10","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_2","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_3","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_4","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_5","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_6","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_7","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_8","score":0.5}
{"chain_id":"3OVR4I9USPIJFRTUK5KCLEJSVGFQ42_1_9","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_1","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_10","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_2","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_3","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_4","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_5","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_6","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_7","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_8","score":0.5}
{"chain_id":"3OWEPKL089BVVHYL6BQ2KW8XEH77NM_1_9","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_1","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_10","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_2","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_3","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_4","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_5","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_6","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_7","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_8","score":0.5}
{"chain_id":"3OXV7EAXLEP5NDR65I1V54AVCIV63P_1_9","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_1","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_10","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_2","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_3","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_4","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_5","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_6","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_7","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_8","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3B4OLOT_1_9","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_1","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_10","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_2","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_3","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_4","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_5","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_6","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_7","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_8","score":0.5}
{"chain_id":"3P1L2B7AD1OCSNNZBKRPIQQ3ZL3LOU_1_9","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_1","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_10","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_2","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_3","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_4","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_5","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_6","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_7","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_8","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP4E8LFP_1_9","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_1","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_10","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_2","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_3","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_4","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_5","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_6","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_7","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_8","score":0.5}
{"chain_id":"3P529IW9KYKIMAA6CH8ZVWHP532FLS_1_9","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_1","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_10","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_2","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_3","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_4","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_5","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_6","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_7","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_8","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XH4SHT2X_1_9","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_1","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_10","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_2","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_3","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_4","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_5","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_6","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_7","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_8","score":0.5}
{"chain_id":"3P59JYT76LJM4T6ZXVVJX4XHRATT2W_1_9","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_1","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_10","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_2","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_3","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_4","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_5","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_6","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_7","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_8","score":0.5}
{"chain_id":"3PB5A5BD0V5PLPHZJ7D7UCZ0FB8G7O_1_9","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_1","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_10","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_2","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_3","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_4","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_5","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_6","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_7","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_8","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENG6YZWA_1_9","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_1","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_10","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_2","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_3","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_4","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_5","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_6","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_7","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_8","score":0.5}
{"chain_id":"3PH3VY7DJLW42LD5H7987ZENHHPZWO_1_9","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_1","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_10","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_2","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_3","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_4","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_5","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_6","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_7","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_8","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPUDLJJO_1_9","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_1","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_10","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_2","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_3","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_4","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_5","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_6","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_7","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_8","score":0.5}
{"chain_id":"3PIWWX1FJJ5SWM82SMN7UFWPZ40JJQ_1_9","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_1","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_10","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_2","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_3","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_4","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_5","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_6","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_7","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_8","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM068A91G_1_9","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_1","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_10","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_2","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_3","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_4","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_5","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_6","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_7","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_8","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM08C819G_1_9","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_1","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_10","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_2","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_3","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_4","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_5","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_6","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_7","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_8","score":0.5}
{"chain_id":"3PJ71Z61R41WVTFPGLQO1LM0IG419U_1_9","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_1","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_10","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_2","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_3","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_4","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_5","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_6","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_7","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_8","score":0.5}
{"chain_id":"3PJUZCGDJ6FE6TZAF6Z3GV98NRH98U_1_9","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_1","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_10","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_2","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_3","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_4","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_5","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_6","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_7","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_8","score":0.5}
{"chain_id":"3PM8NZGV8YFADTH44GMHIPGQODQQXR_1_9","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_1","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_10","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_2","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_3","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_4","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_5","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_6","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_7","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_8","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RC8OC94_1_9","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_1","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_10","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_2","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_3","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_4","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_5","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_6","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_7","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_8","score":0.5}
{"chain_id":"3PMBY0YE272GIWPNWIF8IH5RDR4C9N_1_9","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_1","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_10","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_2","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_3","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_4","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_5","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_6","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_7","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_8","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1RAWZQU_1_9","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_1","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_10","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_2","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_3","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_4","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_5","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_6","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_7","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_8","score":0.5}
{"chain_id":"3PPTZCWALQJZIOHJ5YA2FAW1S54QZK_1_9","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_1","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_10","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_2","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_3","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_4","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_5","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_6","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_7","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_8","score":0.5}
{"chain_id":"3PQ8K71NHXJ6U02U4AXH8HQNGEFAAP_1_9","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_1","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_10","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_2","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_3","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_4","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_5","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_6","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_7","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_8","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VAIY9TG_1_9","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_1","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_10","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_2","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_3","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_4","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_5","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_6","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_7","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_8","score":0.5}
{"chain_id":"3PS7W85Z8Z1X4DRYI4AY7R5VYO1T93_1_9","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_1","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_10","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_2","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_3","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_4","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_5","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_6","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_7","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_8","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM1KT21B_1_9","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_1","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_10","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_2","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_3","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_4","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_5","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_6","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_7","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_8","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEM2AO21N_1_9","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_1","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_10","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_2","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_3","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_4","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_5","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_6","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_7","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_8","score":0.5}
{"chain_id":"3PW9OPU9PQJLV9UQVCB9RYEMZPO21E_1_9","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_1","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_10","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_2","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_3","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_4","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_5","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_6","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_7","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_8","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UASK28F_1_9","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_1","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_10","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_2","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_3","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_4","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_5","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_6","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_7","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_8","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UHRM28M_1_9","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_1","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_10","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_2","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_3","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_4","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_5","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_6","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_7","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_8","score":0.5}
{"chain_id":"3PWWM24LHSX369EBUXKO681UQFY82P_1_9","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_1","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_10","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_2","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_3","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_4","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_5","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_6","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_7","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_8","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY2YABU_1_9","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_1","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_10","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_2","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_3","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_4","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_5","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_6","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_7","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_8","score":0.5}
{"chain_id":"3PXX5PX6LXXBDA7MAOXD3SATY6GBAL_1_9","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_1","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_10","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_2","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_3","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_4","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_5","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_6","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_7","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_8","score":0.5}
{"chain_id":"3PZDLQMM0TK5IC4OB90T8UXD3EB2CK_1_9","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_1","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_10","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_2","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_3","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_4","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_5","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_6","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_7","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_8","score":0.5}
{"chain_id":"3Q5C1WP23M0DU6DDDVD7P5HYN1515C_1_9","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_1","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_10","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_2","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_3","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_4","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_5","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_6","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_7","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_8","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PKUU5CP_1_9","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_1","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_10","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_2","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_3","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_4","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_5","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_6","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_7","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_8","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PNZ2C5H_1_9","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_1","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_10","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_2","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_3","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_4","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_5","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_6","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_7","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_8","score":0.5}
{"chain_id":"3Q8GYXHFEP1XH9NBTS2R610PPSKC5N_1_9","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_1","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_10","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_2","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_3","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_4","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_5","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_6","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_7","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_8","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVC72D20T_1_9","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_1","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_10","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_2","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_3","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_4","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_5","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_6","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_7","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_8","score":0.5}
{"chain_id":"3QAPZX2QN4CLOK98ZT79DTVCRTP207_1_9","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_1","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_10","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_2","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_3","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_4","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_5","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_6","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_7","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_8","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBD3O4P_1_9","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_1","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_10","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_2","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_3","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_4","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_5","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_6","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_7","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_8","score":0.5}
{"chain_id":"3QBD8R3Z21IGUFGE5SS8W9OSBIH4OT_1_9","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_1","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_10","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_2","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_3","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_4","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_5","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_6","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_7","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_8","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T7B95TC_1_9","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_1","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_10","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_2","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_3","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_4","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_5","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_6","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_7","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_8","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0T8LU5TI_1_9","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_1","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_10","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_2","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_3","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_4","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_5","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_6","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_7","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_8","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TBGKT5P_1_9","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_1","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_10","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_2","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_3","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_4","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_5","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_6","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_7","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_8","score":0.5}
{"chain_id":"3QECW5O0KH0E3QPMFEXHVB0TF6B5TC_1_9","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_1","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_10","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_2","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_3","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_4","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_5","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_6","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_7","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_8","score":0.5}
{"chain_id":"3QEMNNSB2XYM9578HHCZORW334S7DC_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK3OF4P_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEK53F48_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFENTA4FJ_1_9","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_1","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_10","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_2","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_3","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_4","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_5","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_6","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_7","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_8","score":0.5}
{"chain_id":"3QFUFYSY9YEMO23L6P9I9FFEY08F4H_1_9","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_1","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_10","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_2","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_3","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_4","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_5","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_6","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_7","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_8","score":0.5}
{"chain_id":"3QIYRE09Y3GHKVJJHV9TJMHKD2Z1NR_1_9","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_1","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_10","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_2","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_3","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_4","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_5","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_6","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_7","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_8","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BC0ENCO_1_9","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_1","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_10","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_2","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_3","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_4","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_5","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_6","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_7","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_8","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BFWINCN_1_9","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_1","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_10","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_2","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_3","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_4","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_5","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_6","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_7","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_8","score":0.5}
{"chain_id":"3QL2OFSM96H17YTHXSYD0I0BIJINC0_1_9","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_1","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_10","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_2","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_3","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_4","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_5","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_6","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_7","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_8","score":0.5}
{"chain_id":"3QRYMNZ7FYGITFVSJET3PS0FYVLTN0_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3Q04309E_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYJ5098_1_9","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_1","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_10","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_2","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_3","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_4","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_5","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_6","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_7","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_8","score":0.5}
{"chain_id":"3QXNC7EIPIUWO4U7K2MONG3QYMB90T_1_9","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_1","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_10","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_2","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_3","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_4","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_5","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_6","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_7","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_8","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LE2FU0_1_9","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_1","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_10","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_2","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_3","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_4","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_5","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_6","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_7","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_8","score":0.5}
{"chain_id":"3QY5DC2MXRJL50X0LV00MJD8LJRFUZ_1_9","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_1","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_10","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_2","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_3","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_4","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_5","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_6","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_7","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_8","score":0.5}
{"chain_id":"3QY7M81QH7LUNBDI9YYMS4RTHR0K7Z_1_9","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_1","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_10","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_2","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_3","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_4","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_5","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_6","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_7","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_8","score":0.5}
{"chain_id":"3R08VXYT7CULIB7ZYCHPGFLOALNW71_1_9","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_1","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_10","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_2","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_3","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_4","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_5","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_6","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_7","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_8","score":0.5}
{"chain_id":"3R0T90IZ1SBVX6CVAOLIAYREF6ZGCK_1_9","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_1","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_10","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_2","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_3","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_4","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_5","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_6","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_7","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_8","score":0.5}
{"chain_id":"3R2PKQ87NW7M2JUHD1FZY696IOVIM8_1_9","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_1","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_10","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_2","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_3","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_4","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_5","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_6","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_7","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_8","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ467XO8_1_9","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_1","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_10","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_2","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_3","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_4","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_5","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_6","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_7","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_8","score":0.5}
{"chain_id":"3R2UR8A0IAF7SH4OP3UDTKLQ8PIXOP_1_9","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_1","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_10","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_2","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_3","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_4","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_5","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_6","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_7","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_8","score":0.5}
{"chain_id":"3R5F3LQFV2JWXC43QLIYQ511BKRZOK_1_9","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_1","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_10","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_2","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_3","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_4","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_5","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_6","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_7","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_8","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCA8WXF6_1_9","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_1","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_10","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_2","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_3","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_4","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_5","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_6","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_7","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_8","score":0.5}
{"chain_id":"3R6BYFZZP7BDM4RVQ0BN6QCCRV3XF4_1_9","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_1","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_10","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_2","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_3","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_4","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_5","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_6","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_7","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_8","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RI5TQTGB_1_9","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_1","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_10","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_2","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_3","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_4","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_5","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_6","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_7","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_8","score":0.5}
{"chain_id":"3R6P78PK7KACJNE6WAG8Z8RIYV9TGR_1_9","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_1","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_10","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_2","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_3","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_4","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_5","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_6","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_7","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_8","score":0.5}
{"chain_id":"3R8YZBNQ9HHGYEBDM3IJOQ32V1Q7QZ_1_9","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_1","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_10","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_2","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_3","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_4","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_5","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_6","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_7","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_8","score":0.5}
{"chain_id":"3R9WASFE2ZF2RZRARIZ83BSNKC9ZFU_1_9","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_1","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_10","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_2","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_3","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_4","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_5","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_6","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_7","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_8","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN1RIUBM_1_9","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_1","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_10","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_2","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_3","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_4","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_5","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_6","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_7","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_8","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN348UB4_1_9","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_1","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_10","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_2","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_3","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_4","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_5","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_6","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_7","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_8","score":0.5}
{"chain_id":"3RANCT1ZVFGVSJLKGTE43TMN3S0BUP_1_9","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_1","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_10","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_2","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_3","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_4","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_5","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_6","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_7","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_8","score":0.5}
{"chain_id":"3RGU30DZTA7IXUENVJ0ZA7O60HTMJU_1_9","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_1","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_10","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_2","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_3","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_4","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_5","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_6","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_7","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_8","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOCM3501_1_9","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_1","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_10","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_2","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_3","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_4","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_5","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_6","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_7","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_8","score":0.5}
{"chain_id":"3RJSC4XJ10TDNHSVHC97B0YOR6G05S_1_9","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_1","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_10","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_2","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_3","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_4","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_5","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_6","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_7","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_8","score":0.5}
{"chain_id":"3RRCEFRB7MBWBLR51NNMQPOT4J04BR_1_9","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_1","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_10","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_2","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_3","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_4","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_5","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_6","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_7","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_8","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL0CBEY9_1_9","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_1","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_10","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_2","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_3","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_4","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_5","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_6","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_7","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_8","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYL3GQEYZ_1_9","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_1","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_10","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_2","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_3","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_4","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_5","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_6","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_7","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_8","score":0.5}
{"chain_id":"3RSDURM96ALAGVH90LDJ7MYLZ9UEYL_1_9","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_1","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_10","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_2","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_3","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_4","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_5","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_6","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_7","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_8","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O1JPSPC_1_9","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_1","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_10","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_2","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_3","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_4","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_5","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_6","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_7","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_8","score":0.5}
{"chain_id":"3RU7GD8VPOSHH0UQAT15JC9O3K6SPX_1_9","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_1","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_10","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_2","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_3","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_4","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_5","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_6","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_7","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_8","score":0.5}
{"chain_id":"3RUIQRXJBBN4M2K2YSBXQ9M95IYLLI_1_9","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_1","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_10","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_2","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_3","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_4","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_5","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_6","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_7","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_8","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA2W4N0N_1_9","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_1","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_10","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_2","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_3","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_4","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_5","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_6","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_7","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_8","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA4QCN0L_1_9","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_1","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_10","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_2","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_3","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_4","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_5","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_6","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_7","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_8","score":0.5}
{"chain_id":"3RWE2M8QWH9HD6Y1LC5T5HYA5O20NL_1_9","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_1","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_10","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_2","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_3","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_4","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_5","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_6","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_7","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_8","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28N7QX1GY_1_9","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_1","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_10","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_2","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_3","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_4","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_5","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_6","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_7","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_8","score":0.5}
{"chain_id":"3RXPCZQMQPABA32XURWYT28NMHD1GB_1_9","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_1","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_10","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_2","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_3","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_4","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_5","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_6","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_7","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_8","score":0.5}
{"chain_id":"3RYC5T2D73S5GLUDV410T24SI05PRP_1_9","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_1","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_10","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_2","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_3","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_4","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_5","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_6","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_7","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_8","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QXOD1D3_1_9","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_1","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_10","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_2","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_3","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_4","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_5","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_6","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_7","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_8","score":0.5}
{"chain_id":"3S06PH7KSR38YJS6S1VQNH5QZYE1DQ_1_9","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_1","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_10","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_2","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_3","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_4","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_5","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_6","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_7","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_8","score":0.5}
{"chain_id":"3S0TNUHWKTHQ9JCRRM452RSYNPKD82_1_9","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_1","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_10","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_2","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_3","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_4","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_5","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_6","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_7","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_8","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYJ9IL4U_1_9","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_1","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_10","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_2","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_3","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_4","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_5","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_6","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_7","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_8","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYL834LY_1_9","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_1","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_10","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_2","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_3","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_4","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_5","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_6","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_7","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_8","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYLBKL42_1_9","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_1","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_10","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_2","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_3","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_4","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_5","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_6","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_7","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_8","score":0.5}
{"chain_id":"3S4AW7T80BH8OUDHS0CG4SKYUVV4L9_1_9","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_1","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_10","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_2","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_3","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_4","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_5","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_6","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_7","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_8","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOL1K6TD8_1_9","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_1","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_10","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_2","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_3","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_4","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_5","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_6","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_7","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_8","score":0.5}
{"chain_id":"3S96KQ6I9M39734FUKF0TNOLO7JDT2_1_9","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_1","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_10","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_2","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_3","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_4","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_5","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_6","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_7","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_8","score":0.5}
{"chain_id":"3SB4CE2TJVUIQDANFKPVSP1LFNTXAV_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD2Y3G0M_1_9","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_1","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_10","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_2","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_3","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_4","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_5","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_6","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_7","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_8","score":0.5}
{"chain_id":"3SB5N7Y3O33B3EHFY8SYFXPD6DPG06_1_9","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_1","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_10","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_2","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_3","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_4","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_5","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_6","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_7","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_8","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WD8H6IYE_1_9","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_1","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_10","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_2","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_3","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_4","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_5","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_6","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_7","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_8","score":0.5}
{"chain_id":"3SBEHTYCWN2MW0JVW43AS1WDS72YIQ_1_9","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_1","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_10","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_2","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_3","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_4","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_5","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_6","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_7","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_8","score":0.5}
{"chain_id":"3SITXWYCNV8N9NFFLYPRN0LRWP7XBM_1_9","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_1","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_10","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_2","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_3","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_4","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_5","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_6","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_7","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_8","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NO7HL8KG_1_9","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_1","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_10","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_2","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_3","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_4","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_5","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_6","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_7","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_8","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOUUU8K2_1_9","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_1","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_10","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_2","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_3","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_4","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_5","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_6","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_7","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_8","score":0.5}
{"chain_id":"3SKEMFQBZ34YNPI1J3QS64NOYP6K8K_1_9","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_1","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_10","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_2","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_3","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_4","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_5","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_6","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_7","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_8","score":0.5}
{"chain_id":"3SLE99ER0NCCEIFUMGDCKL125F6BZF_1_9","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_1","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_10","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_2","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_3","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_4","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_5","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_6","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_7","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_8","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7N3UKC2_1_9","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_1","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_10","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_2","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_3","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_4","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_5","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_6","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_7","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_8","score":0.5}
{"chain_id":"3SNVL38CI4R0ZS8E0F6X8QJ7XUHCK9_1_9","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_1","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_10","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_2","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_3","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_4","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_5","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_6","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_7","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_8","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD8HHYJL_1_9","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_1","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_10","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_2","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_3","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_4","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_5","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_6","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_7","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_8","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BD9ZHJY7_1_9","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_1","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_10","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_2","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_3","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_4","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_5","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_6","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_7","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_8","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCIAJY5_1_9","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_1","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_10","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_2","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_3","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_4","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_5","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_6","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_7","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_8","score":0.5}
{"chain_id":"3SPJ0334212IRB31EMOA42BDCLYJYZ_1_9","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_1","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_10","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_2","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_3","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_4","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_5","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_6","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_7","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_8","score":0.5}
{"chain_id":"3STRJBFXOWQHL9APM11NX0JBAXCTKR_1_9","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_1","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_10","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_2","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_3","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_4","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_5","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_6","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_7","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_8","score":0.5}
{"chain_id":"3SUWZRL0MYC8XB73U2IROVES6Q6E6E_1_9","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_1","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_10","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_2","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_3","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_4","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_5","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_6","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_7","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_8","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4D7A1R9W_1_9","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_1","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_10","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_2","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_3","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_4","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_5","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_6","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_7","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_8","score":0.5}
{"chain_id":"3T111IHZ5EPKOYE6EF537C4DMZN9RT_1_9","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_1","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_10","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_2","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_3","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_4","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_5","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_6","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_7","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_8","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3E42ST_1_9","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_1","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_10","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_2","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_3","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_4","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_5","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_6","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_7","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_8","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1V3EQS25_1_9","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_1","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_10","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_2","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_3","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_4","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_5","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_6","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_7","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_8","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VMVG2SM_1_9","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_1","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_10","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_2","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_3","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_4","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_5","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_6","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_7","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_8","score":0.5}
{"chain_id":"3TAYZSBPLL7LPTTK8VQTNZ1VPXK2SX_1_9","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_1","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_10","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_2","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_3","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_4","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_5","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_6","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_7","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_8","score":0.5}
{"chain_id":"3TDXMTX3CBTKDG9PQL8I9ACKUCL6I5_1_9","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_1","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_10","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_2","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_3","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_4","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_5","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_6","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_7","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_8","score":0.5}
{"chain_id":"3TE22NPXPBBCQM6WM8DZIBINWS3449_1_9","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_1","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_10","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_2","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_3","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_4","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_5","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_6","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_7","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_8","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZ13NR2Y_1_9","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_1","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_10","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_2","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_3","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_4","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_5","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_6","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_7","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_8","score":0.5}
{"chain_id":"3TE3O8573079OET7T6QOXPWZGPDR2B_1_9","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_1","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_10","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_2","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_3","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_4","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_5","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_6","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_7","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_8","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGS0V0DV_1_9","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_1","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_10","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_2","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_3","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_4","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_5","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_6","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_7","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_8","score":0.5}
{"chain_id":"3TEM0PF1Q5W8RU7OWIRQ9CMGVK30DA_1_9","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_1","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_10","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_2","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_3","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_4","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_5","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_6","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_7","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_8","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN31WMML_1_9","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_1","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_10","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_2","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_3","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_4","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_5","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_6","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_7","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_8","score":0.5}
{"chain_id":"3TESA3PJ3198Y2K8YGDRQZGN339MM2_1_9","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_1","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_10","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_2","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_3","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_4","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_5","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_6","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_7","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_8","score":0.5}
{"chain_id":"3TK8OJTYM1KX9SBU4O6AUZTVZ4PVPN_1_9","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_1","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_10","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_2","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_3","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_4","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_5","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_6","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_7","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_8","score":0.5}
{"chain_id":"3TMFV4NEP8DPIPCI8H9VUFHJGZM8WC_1_9","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_1","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_10","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_2","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_3","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_4","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_5","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_6","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_7","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_8","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D27NYCW8_1_9","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_1","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_10","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_2","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_3","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_4","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_5","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_6","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_7","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_8","score":0.5}
{"chain_id":"3TPWUS5F890RUS2VPTP0W1D2BV7CW1_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7FGP3T_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7IBP3U_1_9","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_1","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_10","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_2","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_3","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_4","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_5","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_6","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_7","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_8","score":0.5}
{"chain_id":"3TPZPLC3M0BDXJ9BKE04B41C7UA3PV_1_9","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_1","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_10","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_2","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_3","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_4","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_5","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_6","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_7","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_8","score":0.5}
{"chain_id":"3TS1AR6UQQDJ7PL48N7PCRZOCWEF7S_1_9","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_1","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_10","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_2","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_3","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_4","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_5","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_6","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_7","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_8","score":0.5}
{"chain_id":"3TU5ZICBRD0KYSGWW8AP2QZXU648QP_1_9","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_1","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_10","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_2","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_3","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_4","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_5","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_6","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_7","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_8","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPCV0Q1B_1_9","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_1","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_10","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_2","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_3","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_4","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_5","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_6","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_7","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_8","score":0.5}
{"chain_id":"3TUI152ZZBM2NSWBXN1ANGCPSZEQ1D_1_9","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_1","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_10","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_2","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_3","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_4","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_5","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_6","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_7","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_8","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HW71ZLXA_1_9","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_1","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_10","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_2","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_3","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_4","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_5","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_6","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_7","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_8","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN7FLXI_1_9","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_1","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_10","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_2","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_3","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_4","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_5","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_6","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_7","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_8","score":0.5}
{"chain_id":"3TVRFO09GKEZMW1RCBEL13HWN86XLN_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD68K3TWC_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SFMWT8_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6SI2TWR_1_9","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_1","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_10","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_2","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_3","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_4","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_5","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_6","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_7","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_8","score":0.5}
{"chain_id":"3TVSS0C0E1Z8G946BFKQLBD6WMPTWQ_1_9","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_1","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_10","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_2","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_3","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_4","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_5","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_6","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_7","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_8","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHW0RU4D_1_9","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_1","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_10","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_2","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_3","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_4","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_5","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_6","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_7","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_8","score":0.5}
{"chain_id":"3TXD01ZLD4GB7K0B1E4HVFGHXHSU4D_1_9","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_1","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_10","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_2","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_3","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_4","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_5","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_6","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_7","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_8","score":0.5}
{"chain_id":"3TXMY6UCAENMAV69DKQU4CVGWRFCQP_1_9","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_1","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_10","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_2","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_3","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_4","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_5","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_6","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_7","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_8","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH69GD0K4_1_9","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_1","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_10","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_2","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_3","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_4","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_5","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_6","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_7","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_8","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6EEL0KD_1_9","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_1","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_10","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_2","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_3","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_4","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_5","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_6","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_7","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_8","score":0.5}
{"chain_id":"3TY7ZAOG5FJG50DYOZDDDPH6OGY0K4_1_9","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_1","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_10","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_2","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_3","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_4","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_5","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_6","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_7","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_8","score":0.5}
{"chain_id":"3TYCR1GOTCIORR16DT26YOX7LLDLZX_1_9","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_1","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_10","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_2","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_3","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_4","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_5","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_6","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_7","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_8","score":0.5}
{"chain_id":"3U088ZLJVKS7007FDDWG10B1V01W00_1_9","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_1","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_10","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_2","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_3","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_4","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_5","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_6","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_7","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_8","score":0.5}
{"chain_id":"3U0SRXB7CD45D0I0FPO8PDZXEORNRG_1_9","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_1","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_10","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_2","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_3","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_4","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_5","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_6","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_7","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_8","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183050B71_1_9","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_1","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_10","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_2","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_3","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_4","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_5","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_6","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_7","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_8","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WN4B71_1_9","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_1","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_10","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_2","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_3","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_4","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_5","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_6","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_7","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_8","score":0.5}
{"chain_id":"3U4J9857OEATU89O3LLTT183WSO7BR_1_9","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_1","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_10","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_2","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_3","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_4","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_5","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_6","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_7","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_8","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MTFPHPC_1_9","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_1","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_10","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_2","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_3","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_4","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_5","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_6","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_7","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_8","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MU5RPH3_1_9","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_1","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_10","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_2","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_3","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_4","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_5","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_6","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_7","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_8","score":0.5}
{"chain_id":"3U5NZHP4LR1SRRGAZZP6FY2MVDWPHP_1_9","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_1","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_10","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_2","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_3","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_4","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_5","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_6","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_7","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_8","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWVEJ4ZE_1_9","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_1","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_10","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_2","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_3","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_4","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_5","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_6","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_7","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_8","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWWDG4ZA_1_9","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_1","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_10","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_2","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_3","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_4","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_5","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_6","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_7","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_8","score":0.5}
{"chain_id":"3U84XHCDICCSTJUL713PC7VWX1SZ4U_1_9","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_1","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_10","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_2","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_3","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_4","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_5","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_6","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_7","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_8","score":0.5}
{"chain_id":"3U8YCDAGXPF2G3BT14XA9BTFCQRQ0K_1_9","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_1","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_10","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_2","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_3","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_4","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_5","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_6","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_7","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_8","score":0.5}
{"chain_id":"3UN61F00HWO1NBCUBPSMVWZBM96R5K_1_9","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_1","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_10","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_2","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_3","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_4","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_5","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_6","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_7","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_8","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UAWTYMA_1_9","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_1","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_10","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_2","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_3","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_4","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_5","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_6","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_7","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_8","score":0.5}
{"chain_id":"3UNH76FOCS48SJ9MHJ12KU3UBAIYMS_1_9","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_1","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_10","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_2","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_3","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_4","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_5","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_6","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_7","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_8","score":0.5}
{"chain_id":"3UOUJI6MTDD25MOLLP6MSQDFRVJXUK_1_9","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_1","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_10","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_2","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_3","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_4","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_5","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_6","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_7","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_8","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQV5KZU2_1_9","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_1","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_10","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_2","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_3","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_4","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_5","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_6","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_7","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_8","score":0.5}
{"chain_id":"3URFVVM165HRAHO0M7U7PBTQZX1UZ2_1_9","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_1","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_10","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_2","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_3","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_4","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_5","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_6","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_7","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_8","score":0.5}
{"chain_id":"3UWN2HHPUY4HEFIDUEODFN4TZEVNS0_1_9","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_1","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_10","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_2","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_3","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_4","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_5","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_6","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_7","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_8","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXBXE7AE_1_9","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_1","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_10","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_2","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_3","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_4","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_5","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_6","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_7","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_8","score":0.5}
{"chain_id":"3UXUOQ9OKEWOBY4WX7LXYQOXIOKA7C_1_9","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_1","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_10","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_2","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_3","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_4","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_5","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_6","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_7","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_8","score":0.5}
{"chain_id":"3V0Z7YWSIYZ1HLAO2QVYYML2OL9V2U_1_9","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_1","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_10","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_2","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_3","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_4","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_5","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_6","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_7","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_8","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJACX23K_1_9","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_1","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_10","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_2","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_3","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_4","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_5","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_6","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_7","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_8","score":0.5}
{"chain_id":"3V5Q80FXIXQH5C85IGPSFRTJXGV32E_1_9","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_1","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_10","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_2","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_3","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_4","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_5","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_6","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_7","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_8","score":0.5}
{"chain_id":"3VA45EW49NMZ2GJVIA96YBHP3AQ1OO_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XATRO89_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XDYWO8R_1_9","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_1","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_10","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_2","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_3","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_4","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_5","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_6","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_7","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_8","score":0.5}
{"chain_id":"3VAR3R6G1P0HDG3GHVILDL4XR2TO8A_1_9","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_1","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_10","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_2","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_3","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_4","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_5","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_6","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_7","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_8","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3AISGR_1_9","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_1","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_10","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_2","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_3","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_4","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_5","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_6","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_7","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_8","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VW3XYGS5_1_9","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_1","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_10","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_2","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_3","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_4","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_5","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_6","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_7","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_8","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWY4YSGQ_1_9","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_1","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_10","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_2","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_3","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_4","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_5","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_6","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_7","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_8","score":0.5}
{"chain_id":"3VBEN272MKYB4N1R7ABIP3VWZ0GGSP_1_9","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_1","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_10","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_2","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_3","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_4","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_5","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_6","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_7","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_8","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G77FCON_1_9","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_1","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_10","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_2","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_3","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_4","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_5","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_6","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_7","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_8","score":0.5}
{"chain_id":"3VD82FOHKQNJPJTTY7YQF70G7AQOCG_1_9","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_1","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_10","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_2","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_3","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_4","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_5","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_6","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_7","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_8","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLA5F8M_1_9","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_1","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_10","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_2","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_3","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_4","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_5","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_6","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_7","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_8","score":0.5}
{"chain_id":"3VE8AYVF8MWN73QNISZVQRVJLAUF8B_1_9","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_1","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_10","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_2","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_3","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_4","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_5","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_6","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_7","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_8","score":0.5}
{"chain_id":"3VHHR074H3G57HV0UYAN74489JEL7Z_1_9","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_1","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_10","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_2","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_3","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_4","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_5","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_6","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_7","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_8","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KR8BTO9_1_9","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_1","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_10","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_2","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_3","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_4","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_5","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_6","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_7","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_8","score":0.5}
{"chain_id":"3VJ40NV2QIM0B0V4KTTG4H0KS9NOTJ_1_9","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_1","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_10","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_2","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_3","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_4","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_5","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_6","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_7","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_8","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6YSPTFX_1_9","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_1","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_10","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_2","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_3","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_4","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_5","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_6","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_7","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_8","score":0.5}
{"chain_id":"3VNL7UK1XFI65NIBLQAQHNR6ZBFTFQ_1_9","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_1","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_10","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_2","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_3","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_4","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_5","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_6","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_7","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_8","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGTZZ9VV_1_9","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_1","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_10","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_2","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_3","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_4","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_5","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_6","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_7","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_8","score":0.5}
{"chain_id":"3VNXK88KKCHCH5VNNZAD89TGWQDV9G_1_9","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_1","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_10","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_2","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_3","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_4","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_5","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_6","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_7","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_8","score":0.5}
{"chain_id":"3VSOLARPKB8S5WT43P47PE1X7A393Q_1_9","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_1","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_10","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_2","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_3","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_4","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_5","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_6","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_7","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_8","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITKEQKRI_1_9","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_1","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_10","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_2","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_3","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_4","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_5","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_6","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_7","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_8","score":0.5}
{"chain_id":"3W2LOLRXLBE45UXXICWSXLITOJSKRY_1_9","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_1","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_10","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_2","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_3","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_4","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_5","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_6","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_7","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_8","score":0.5}
{"chain_id":"3W8CV64QJ2Y7Z403IAT9T827KXY9H4_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE50B5VN_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOE8VZ5V4_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOEBMB5V1_1_9","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_1","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_10","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_2","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_3","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_4","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_5","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_6","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_7","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_8","score":0.5}
{"chain_id":"3W92K5RLWUGTGITBK9XWWTOECXG5VT_1_9","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_1","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_10","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_2","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_3","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_4","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_5","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_6","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_7","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_8","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVGEQ53W_1_9","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_1","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_10","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_2","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_3","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_4","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_5","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_6","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_7","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_8","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVJJX35E_1_9","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_1","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_10","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_2","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_3","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_4","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_5","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_6","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_7","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_8","score":0.5}
{"chain_id":"3WETL7AQWT7949RS0ZRQDYWVUR735F_1_9","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_1","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_10","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_2","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_3","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_4","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_5","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_6","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_7","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_8","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKETNA1Z_1_9","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_1","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_10","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_2","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_3","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_4","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_5","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_6","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_7","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_8","score":0.5}
{"chain_id":"3WJEQKOXA81A0CPU8R4OXGXKP2YA13_1_9","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_1","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_10","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_2","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_3","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_4","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_5","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_6","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_7","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_8","score":0.5}
{"chain_id":"3WLEIWSYHOGWZKF4X1EUJLUTFF2H2B_1_9","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_1","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_10","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_2","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_3","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_4","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_5","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_6","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_7","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_8","score":0.5}
{"chain_id":"3WMOAN2SRBWX67ZHO9TIQAO027MNVK_1_9","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_1","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_10","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_2","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_3","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_4","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_5","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_6","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_7","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_8","score":0.5}
{"chain_id":"3WOKGM4L71FZVRYDMR56K6YFZSUO03_1_9","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_1","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_10","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_2","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_3","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_4","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_5","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_6","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_7","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_8","score":0.5}
{"chain_id":"3WQ3B2KGE8FFJMUME54KSRHQQRRB14_1_9","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_1","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_10","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_2","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_3","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_4","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_5","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_6","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_7","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_8","score":0.5}
{"chain_id":"3WQQ9FUS6ATXUME7DQDZ714YKNCB8Z_1_9","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_1","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_10","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_2","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_3","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_4","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_5","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_6","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_7","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_8","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XMXM47R_1_9","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_1","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_10","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_2","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_3","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_4","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_5","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_6","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_7","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_8","score":0.5}
{"chain_id":"3WR9XG3T63A999OFO6B6LN2XQPP74L_1_9","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_1","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_10","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_2","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_3","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_4","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_5","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_6","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_7","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_8","score":0.5}
{"chain_id":"3WRFBPLXRANDUYXY4ZNC7FWH8C23NN_1_9","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_1","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_10","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_2","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_3","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_4","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_5","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_6","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_7","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_8","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGF1D1ATS_1_9","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_1","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_10","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_2","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_3","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_4","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_5","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_6","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_7","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_8","score":0.5}
{"chain_id":"3WSELTNVR31B9W13AAOY3MGFGWCTA3_1_9","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_1","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_10","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_2","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_3","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_4","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_5","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_6","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_7","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_8","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0H098CBX_1_9","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_1","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_10","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_2","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_3","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_4","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_5","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_6","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_7","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_8","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HURPBC7_1_9","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_1","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_10","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_2","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_3","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_4","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_5","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_6","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_7","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_8","score":0.5}
{"chain_id":"3WT783CTPBGZ6P4KU2T49N0HWVUCBN_1_9","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_1","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_10","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_2","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_3","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_4","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_5","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_6","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_7","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_8","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP43CSKP_1_9","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_1","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_10","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_2","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_3","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_4","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_5","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_6","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_7","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_8","score":0.5}
{"chain_id":"3WYGZ5XF3WEG69XAX1WXNVNP7Z2KS2_1_9","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_1","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_10","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_2","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_3","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_4","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_5","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_6","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_7","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_8","score":0.5}
{"chain_id":"3WYP994K17Q63GOUU3ULVY68MQNY6Z_1_9","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_1","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_10","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_2","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_3","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_4","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_5","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_6","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_7","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_8","score":0.5}
{"chain_id":"3X08E93BHVH4KWEOOKZTC7MFK2Z66H_1_9","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_1","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_10","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_2","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_3","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_4","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_5","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_6","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_7","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_8","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKWDCWSR_1_9","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_1","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_10","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_2","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_3","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_4","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_5","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_6","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_7","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_8","score":0.5}
{"chain_id":"3X0H8UUIT1N719RRMFF0B6HKXS9SWF_1_9","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_1","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_10","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_2","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_3","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_4","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_5","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_6","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_7","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_8","score":0.5}
{"chain_id":"3X1FV8S5JXQRWFIV15GN0QF3BJQGVH_1_9","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_1","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_10","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_2","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_3","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_4","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_5","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_6","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_7","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_8","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV37J1L4_1_9","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_1","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_10","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_2","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_3","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_4","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_5","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_6","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_7","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_8","score":0.5}
{"chain_id":"3X31TUMD7XLRWVGY5ITE6UDV6UNL15_1_9","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_1","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_10","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_2","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_3","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_4","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_5","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_6","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_7","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_8","score":0.5}
{"chain_id":"3X4JMASXCM8FCX94IM0KEMYGMLZ0BD_1_9","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_1","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_10","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_2","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_3","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_4","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_5","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_6","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_7","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_8","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCO849RWY_1_9","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_1","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_10","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_2","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_3","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_4","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_5","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_6","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_7","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_8","score":0.5}
{"chain_id":"3X4MXAO0BGNV0URE7QFVLWCOAAJRWM_1_9","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_1","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_10","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_2","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_3","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_4","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_5","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_6","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_7","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_8","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWM8WYLCN_1_9","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_1","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_10","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_2","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_3","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_4","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_5","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_6","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_7","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_8","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMC42CL2_1_9","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_1","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_10","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_2","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_3","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_4","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_5","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_6","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_7","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_8","score":0.5}
{"chain_id":"3X65QVEQI0MBJYQWLFIYNQWMNBVLCT_1_9","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_1","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_10","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_2","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_3","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_4","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_5","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_6","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_7","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_8","score":0.5}
{"chain_id":"3X66WABAJWH7K6GCHVAHHVNKA86G31_1_9","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_1","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_10","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_2","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_3","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_4","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_5","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_6","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_7","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_8","score":0.5}
{"chain_id":"3X73LLYYQ1DSO64XJKCEB9XRT3QNHU_1_9","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_1","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_10","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_2","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_3","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_4","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_5","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_6","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_7","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_8","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZY4Z2SQW_1_9","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_1","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_10","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_2","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_3","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_4","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_5","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_6","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_7","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_8","score":0.5}
{"chain_id":"3X87C8JFV6A2HCV5A6GUJHZYJLXSQE_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQ0UNTLV_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQBYKLT3_1_9","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_1","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_10","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_2","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_3","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_4","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_5","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_6","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_7","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_8","score":0.5}
{"chain_id":"3XC1O3LBOSLS5FS771DOC0WQZ72LTR_1_9","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_1","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_10","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_2","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_3","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_4","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_5","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_6","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_7","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_8","score":0.5}
{"chain_id":"3XIQGXAUMC707BCP8HDBIYZVFQWX70_1_9","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_1","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_10","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_2","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_3","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_4","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_5","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_6","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_7","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_8","score":0.5}
{"chain_id":"3XLBSAQ9Z4BPC6C49Z1WFJF60J2Z76_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV590DJEP6_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59WCFPE7_1_9","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_1","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_10","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_2","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_3","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_4","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_5","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_6","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_7","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_8","score":0.5}
{"chain_id":"3XM0HYN6NKYG7HP89YH0UV59YBLEP2_1_9","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_1","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_10","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_2","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_3","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_4","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_5","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_6","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_7","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_8","score":0.5}
{"chain_id":"3XUHV3NRVKXOYHYRFKGSHSX54M0H5I_1_9","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_1","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_10","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_2","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_3","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_4","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_5","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_6","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_7","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_8","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA3MAA0P_1_9","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_1","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_10","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_2","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_3","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_4","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_5","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_6","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_7","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_8","score":0.5}
{"chain_id":"3XXU1SWE8MUATN4CC80OJBEA94Y0A9_1_9","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_1","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_10","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_2","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_3","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_4","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_5","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_6","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_7","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_8","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6J0ZVDE_1_9","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_1","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_10","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_2","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_3","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_4","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_5","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_6","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_7","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_8","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KU6VDA_1_9","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_1","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_10","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_2","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_3","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_4","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_5","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_6","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_7","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_8","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6KYBVDN_1_9","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_1","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_10","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_2","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_3","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_4","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_5","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_6","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_7","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_8","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6MOMVDG_1_9","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_1","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_10","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_2","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_3","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_4","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_5","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_6","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_7","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_8","score":0.5}
{"chain_id":"3Y4W8Q93LZJOKV84ZFFFU5C6O1IVD4_1_9","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_1","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_10","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_2","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_3","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_4","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_5","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_6","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_7","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_8","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31SNFPID_1_9","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_1","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_10","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_2","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_3","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_4","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_5","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_6","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_7","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_8","score":0.5}
{"chain_id":"3Y5140Z9DXFSNMRU5H7RFA31T1UPIL_1_9","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_1","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_10","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_2","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_3","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_4","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_5","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_6","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_7","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_8","score":0.5}
{"chain_id":"3Y54SXRO1LKVO5F1GF5P3NS9EMXTU9_1_9","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_1","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_10","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_2","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_3","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_4","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_5","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_6","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_7","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_8","score":0.5}
{"chain_id":"3YDGXNSEOZTD9NTYYB3CZNYSX5F48D_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKJVK41W_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKWCQ14A_1_9","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_1","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_10","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_2","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_3","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_4","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_5","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_6","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_7","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_8","score":0.5}
{"chain_id":"3YDTZAI2WXFVYN9DZQUXKDBKXTS14B_1_9","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_1","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_10","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_2","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_3","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_4","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_5","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_6","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_7","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_8","score":0.5}
{"chain_id":"3YGXWBAF70GFLQJBFNJH19UBM86C4K_1_9","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_1","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_10","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_2","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_3","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_4","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_5","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_6","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_7","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_8","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQ6KGHN_1_9","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_1","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_10","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_2","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_3","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_4","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_5","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_6","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_7","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_8","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUQNJGHK_1_9","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_1","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_10","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_2","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_3","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_4","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_5","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_6","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_7","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_8","score":0.5}
{"chain_id":"3YMU66OBIN7MEENBWGZJLPOUU04HG0_1_9","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_1","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_10","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_2","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_3","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_4","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_5","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_6","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_7","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_8","score":0.5}
{"chain_id":"3YOH7BII096WY1EERW12YI7W620KV9_1_9","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_1","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_10","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_2","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_3","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_4","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_5","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_6","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_7","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_8","score":0.5}
{"chain_id":"3YT88D1N08XCMSCV7MVWFNFDSY13KY_1_9","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_1","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_10","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_2","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_3","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_4","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_5","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_6","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_7","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_8","score":0.5}
{"chain_id":"3YW4XOSQKQKUFL3SEWLFXH9EM6CU1D_1_9","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_1","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_10","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_2","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_3","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_4","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_5","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_6","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_7","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_8","score":0.5}
{"chain_id":"3YWRV122CSYCQLNDDHUUCRWMTO78UH_1_9","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_1","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_10","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_2","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_3","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_4","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_5","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_6","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_7","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_8","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5KTACU2_1_9","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_1","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_10","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_2","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_3","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_4","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_5","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_6","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_7","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_8","score":0.5}
{"chain_id":"3YZ8UPK3VTLE2ODQUTAZEDS5ODRUC9_1_9","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_1","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_10","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_2","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_3","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_4","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_5","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_6","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_7","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_8","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGMF2E2F_1_9","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_1","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_10","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_2","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_3","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_4","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_5","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_6","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_7","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_8","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGNTUE20_1_9","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_1","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_10","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_2","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_3","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_4","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_5","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_6","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_7","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_8","score":0.5}
{"chain_id":"3Z2R0DQ0JHDKFAO2706OYIXGO4DE26_1_9","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_1","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_10","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_2","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_3","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_4","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_5","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_6","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_7","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_8","score":0.5}
{"chain_id":"3Z4AIRP3C6CMWPXNJ1W2HO8IPXQX1W_1_9","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_1","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_10","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_2","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_3","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_4","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_5","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_6","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_7","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_8","score":0.5}
{"chain_id":"3Z4XG4ZF48Q47PH8IM5HWYVQTKXX85_1_9","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_1","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_10","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_2","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_3","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_4","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_5","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_6","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_7","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_8","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB104M8Z7_1_9","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_1","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_10","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_2","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_3","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_4","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_5","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_6","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_7","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_8","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB11JPZ8W_1_9","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_1","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_10","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_2","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_3","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_4","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_5","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_6","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_7","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_8","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1Z8I8ZA_1_9","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_1","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_10","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_2","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_3","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_4","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_5","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_6","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_7","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_8","score":0.5}
{"chain_id":"3Z7ISHFUH0UTCKHNJ4T2TJB1ZFU8Z0_1_9","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_1","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_10","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_2","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_3","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_4","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_5","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_6","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_7","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_8","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNUE0KH8_1_9","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_1","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_10","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_2","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_3","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_4","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_5","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_6","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_7","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_8","score":0.5}
{"chain_id":"3Z9WI9EOZZNRG0JUM7KYJHGNYJRHKA_1_9","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_1","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_10","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_2","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_3","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_4","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_5","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_6","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_7","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_8","score":0.5}
{"chain_id":"3ZAK8W07I4DU8WIAIDHFJCQ4HRQ0UQ_1_9","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_1","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_10","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_2","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_3","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_4","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_5","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_6","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_7","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_8","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57K12ECZD_1_9","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_1","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_10","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_2","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_3","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_4","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_5","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_6","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_7","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_8","score":0.5}
{"chain_id":"3ZAZR5XV01HVON700G97V57KRHLCZ4_1_9","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_1","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_10","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_2","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_3","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_4","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_5","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_6","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_7","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_8","score":0.5}
{"chain_id":"3ZDAD0O1T1CN599WLKGCNURD0CZTXU_1_9","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_1","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_10","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_2","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_3","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_4","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_5","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_6","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_7","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_8","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OAW8TZ8_1_9","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_1","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_10","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_2","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_3","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_4","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_5","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_6","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_7","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_8","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OBC1ZT4_1_9","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_1","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_10","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_2","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_3","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_4","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_5","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_6","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_7","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_8","score":0.5}
{"chain_id":"3ZGVPD4G6TGCA49BM24XKF7OC9GZTE_1_9","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_1","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_10","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_2","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_3","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_4","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_5","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_6","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_7","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_8","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ5OS9_1_9","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_1","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_10","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_2","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_3","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_4","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_5","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_6","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_7","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_8","score":0.5}
{"chain_id":"3ZOTGHDK5IAZW0IPVTOQUC4YRZ6OSA_1_9","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_1","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_10","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_2","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_3","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_4","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_5","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_6","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_7","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_8","score":0.5}
{"chain_id":"3ZPPDN2SLVVVFU91S9L3855UXK79E0_1_9","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_1","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_10","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_2","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_3","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_4","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_5","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_6","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_7","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_8","score":0.5}
{"chain_id":"3ZQIG0FLQEFNT18VGAK5MT1RQGCWVV_1_9","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_1","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_10","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_2","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_3","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_4","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_5","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_6","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_7","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_8","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SABCZ04P_1_9","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_1","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_10","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_2","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_3","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_4","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_5","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_6","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_7","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_8","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SADLM04W_1_9","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_1","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_10","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_2","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_3","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_4","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_5","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_6","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_7","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_8","score":0.5}
{"chain_id":"3ZR9AIQJUB8VRYOV37QX68SAHS704Z_1_9","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_1","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_10","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_2","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_3","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_4","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_5","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_6","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_7","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_8","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVK2GBSFR_1_9","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_1","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_10","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_2","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_3","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_4","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_5","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_6","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_7","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_8","score":0.5}
{"chain_id":"3ZSANO2JCF65QN5WWQ507IVKCJSSFO_1_9","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_1","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_10","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_2","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_3","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_4","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_5","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_6","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_7","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_8","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z22AHRO6_1_9","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_1","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_10","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_2","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_3","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_4","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_5","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_6","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_7","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_8","score":0.5}
{"chain_id":"3ZSY5X72NXANVLICG4OL42Z23NGORT_1_9","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_1","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_10","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_2","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_3","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_4","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_5","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_6","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_7","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_8","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98EGDW3B_1_9","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_1","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_10","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_2","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_3","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_4","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_5","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_6","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_7","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_8","score":0.5}
{"chain_id":"3ZV9H2YQQD63HS6CW0EZ3Y98ZXCW3T_1_9","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_1","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_10","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_2","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_3","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_4","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_5","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_6","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_7","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_8","score":0.5}
{"chain_id":"3ZWFC4W1UU6TP85JH15VH8QOA39FRF_1_9","score":0.5}
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/data/dummy_predictions_dev.jsonl/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/data/dummy_predictions_dev.jsonl",
"repo_id": "ContextualSP",
"token_count": 353770
}
| 242 |
## ProPara datasets
This directory contains dev, train and test datasets.
* [dev](dev/) contains the dev dataset for evaluating your predictor during development
* [train](train/) contains the training dataset for training your predictor
* [test](test/) contains the test dataset for evaluation on the [ProPara Leaderboard](https://leaderboard.allenai.org/).
Each subdirectory contains `answers.tsv` and `dummy-predictions.tsv` files. In
addition, each has a `sentences.tsv` file as a convenience for discovering the
process paragraph behind each process id.
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/data/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/data/README.md",
"repo_id": "ContextualSP",
"token_count": 147
}
| 243 |
# ProPara Evaluator
## Example
```
% export PYTHONPATH=.
% python3 evaluator.py --predictions testfiles-1/predictions.tsv --answers testfiles-1/answers.tsv --output /tmp/metrics.json
=================================================
Question Avg. Precision Avg. Recall Avg. F1
-------------------------------------------------
Inputs 0.793 0.597 0.681
Outputs 0.739 0.593 0.658
Conversions 0.878 0.200 0.326
Moves 0.563 0.331 0.417
-------------------------------------------------
Overall Precision 0.743
Overall Recall 0.430
Overall F1 0.545
=================================================
Evaluated 54 predictions against 54 answers.
% cat /tmp/metrics.json
{"precision": 0.743, "recall": 0.43, "f1": 0.545}
```
## Usage
The script requires prediction and answer input files, and produces a report to
standard out. You'll need Python 3.6 or newer to run it.
Optional:
* the argument `--output` writes the overall precision, recall and F1 score to a JSON file.
* the argument `--diagnostics` writes a diagnostic file with process summaries (an intermediate representation of each process) and their scores.
* the argument `--sentences` reads sentences from a sentences file and includes them in the diagnostics file. See [sentences.tsv files in the ../data directory](../data/).
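For example, a run that exercises all three optional arguments might look like this (the diagnostics and sentences paths are illustrative):
```
% python3 evaluator.py --predictions testfiles-1/predictions.tsv --answers testfiles-1/answers.tsv --output /tmp/metrics.json --diagnostics /tmp/diagnostics.json --sentences ../data/dev/sentences.tsv
```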
## Evaluation process
### Overview
The task is to predict what happens to specific participants in each sentence
of a process paragraph. This prediction for a process is summarized to answer
four key questions:
1. **What are the Inputs?** (i.e., Which participants existed before the process began, and don't exist after the process ended? Or, what participants were consumed?)
2. **What are the Outputs?** (i.e., Which participants existed after the process ended, but didn't exist before the process began? Or, what participants were produced?)
3. **What are the Conversions?** (i.e., Which participants were converted to which other participants?)
4. **What are the Moves?** (i.e., Which participants moved from one location to another?)
The proposed answers (prediction) to these questions are compared to the
correct answers (gold) to arrive at a score for each question.
These scores are aggregated over all processes to arrive at a final performance
score represented by precision, recall and F1 calculations.
### Details
The process of evaluating predictions can be divided into four steps.
#### Step 1. Reading the action file
An **action file** is a file with tab-separated values representing a table of
actions for participants in sentences of process paragraphs.
For example, consider process 1167 from the training set, consisting of five
sentences:
> ① The gravity of the sun pulls its mass inward. ② There is a lot of pressure
> on the Sun. ③ The pressure forces atoms of hydrogen to fuse together in
> nuclear reactions. ④ The energy from the reactions gives off different kinds
> of light. ⑤ The light travels to the Earth.
For this process paragraph, the objective is to decide what happens to the two
participants "atoms of hydrogen" and "sunlight; light" by filling in the three
blank columns in a six-column table:
| ProcessID | Sentence | Participant(s) | Action | Location Before | Location After |
| --------- | -------- | ----------------- | ------ | ----------------- | -------------- |
| 1167 | 1 | atoms of hydrogen | | | |
| 1167 | 1 | sunlight; light | | | |
| 1167 | 2 | atoms of hydrogen | | | |
| 1167 | 2 | sunlight; light | | | |
| 1167 | 3 | atoms of hydrogen | | | |
| 1167 | 3 | sunlight; light | | | |
| 1167 | 4 | atoms of hydrogen | | | |
| 1167 | 4 | sunlight; light | | | |
| 1167 | 5 | atoms of hydrogen | | | |
| 1167 | 5 | sunlight; light | | | |
A TSV file with this table is called an **action file** because each row in the
table represents what happens to a specific participant in a specific sentence
in a specific process paragraph.
The first three columns are fixed, and provided to you:
* **ProcessID** (column 1): This is the identifier for the process paragraph.
* **Sentence** (column 2): This is the sentence number (starting with 1) in the process paragraph.
* **Participant(s)** (column 3): This is usually a span from the process paragraph identifying an interesting participant. It may contain a `;` character to delimit alternative participants (e.g., coreferent mentions of `sunlight` and `light`.) See section **Scoring each process** for another example.
Your prediction action file must contain these three columns verbatim to be admitted for evaluation.
The last three columns are to be predicted.
* **Action** (column 4): This should describe what happens to the participant in this sentence. It must be one of `NONE`, `CREATE`, `MOVE`, or `DESTROY`.
* **Location Before** (column 5): This should describe the location of the participant (column 3) before this sentence (column 2).
* **Location After** (column 6): This should describe the location of the participant (column 3) after this sentence (column 2).
Specifically, there are rules for the locations based on the kind of action happening.
* If the **Action** is `NONE`, then...
* **Location Before** and **Location After** must be equal. The special value `?` means the participant's location is unknown to the predictor, and the special value `-` means the participant doesn't exist.
* If the **Action** is `CREATE`, then...
* **Location Before** must be `-` to mean that the participant didn't exist before this sentence.
* **Location After** is the location where the participant was created. The special value `?` means the participant was created but the location is unknown to the predictor.
* If the **Action** is `MOVE`, then...
* **Location Before** is the location from where the participant moved. The special value `?` means the participant existed before this sentence, but its location is unknown to the predictor.
* **Location After** is the location to where the participant moved. The special value `?` means the participant existed after this sentence, but its location is unknown to the predictor.
* If the **Action** is `DESTROY`, then...
* **Location Before** is the location where the participant was destroyed. The special value `?` means the participant was destroyed but the location is unknown to the predictor.
* **Location After** must be `-` to mean that the participant didn't exist after this sentence.
If your prediction file does not meet these requirements, evaluation will abort.
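These rules are mechanical, so you can check them locally before submitting. Below is a minimal, hypothetical sketch of a per-row check; the evaluator's own validation is authoritative and may be stricter (for example, about column counts):
```python
# Hypothetical pre-submission check of the per-row rules above; the
# evaluator's real validation lives in its own code and may differ.
ABSENT = "-"  # the participant does not exist

def row_is_valid(action: str, before: str, after: str) -> bool:
    if action == "NONE":
        return before == after                        # includes "?"/"?" and "-"/"-"
    if action == "CREATE":
        return before == ABSENT and after != ABSENT   # born at a location (or "?")
    if action == "MOVE":
        return before != ABSENT and after != ABSENT   # existed before and after
    if action == "DESTROY":
        return before != ABSENT and after == ABSENT   # gone afterwards
    return False                                      # not one of the four actions

assert row_is_valid("CREATE", "-", "star")
assert not row_is_valid("MOVE", "-", "soil")
```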
For example, a valid prediction for the above paragraph might be:
| ProcessID | Sentence | Participant(s) | Action | Location Before | Location After |
| --------- | -------- | ----------------- | ------ | ----------------- | -------------- |
| 1167 | 1 | atoms of hydrogen | NONE | - | - |
| 1167 | 1 | sunlight; light | NONE | - | - |
| 1167 | 2 | atoms of hydrogen | NONE | - | - |
| 1167 | 2 | sunlight; light | NONE | - | - |
| 1167 | 3 | atoms of hydrogen | DESTROY | star | - |
| 1167 | 3 | sunlight; light | NONE | - | - |
| 1167 | 4 | atoms of hydrogen | NONE | - | - |
| 1167 | 4 | sunlight; light | CREATE | - | star |
| 1167 | 5 | atoms of hydrogen | NONE | - | - |
| 1167 | 5 | sunlight; light | MOVE | star | soil |
This means:
* In paragraph 1167, sentence 3, the participant `atoms of hydrogen` is destroyed at `star`
* In paragraph 1167, sentence 4, the participant `sunlight; light` is created at `star`
* In paragraph 1167, sentence 5, the participant `sunlight; light` is moved from `star` to `soil`
**Note:** This is a somewhat contrived example to illustrate the scoring mechanism below.
For comparison, the action file with the correct ("gold") answers is:
| ProcessID | Sentence | Participant(s) | Action | Location Before | Location After |
| --------- | -------- | ----------------- | ------ | ----------------- | -------------- |
| 1167 | 1 | atoms of hydrogen | NONE | sun | sun |
| 1167      | 1        | sunlight; light   | NONE    | -               | -              |
| 1167      | 2        | atoms of hydrogen | NONE    | sun             | sun            |
| 1167      | 2        | sunlight; light   | NONE    | -               | -              |
| 1167      | 3        | atoms of hydrogen | DESTROY | sun             | -              |
| 1167 | 3 | sunlight; light | NONE | - | - |
| 1167 | 4 | atoms of hydrogen | NONE | - | - |
| 1167 | 4 | sunlight; light | CREATE | - | sun |
| 1167 | 5 | atoms of hydrogen | NONE | - | - |
| 1167 | 5 | sunlight; light | MOVE | sun | earth |
That is:
* In paragraph 1167, sentence 3, the participant `atoms of hydrogen` is destroyed at `sun`
* In paragraph 1167, sentence 4, the participant `sunlight; light` is created at `sun`
* In paragraph 1167, sentence 5, the participant `sunlight; light` is moved from `sun` to `earth`
**Note:** You can use the `explainer.py` program to parse action files into explanations like the above.
Next, these two action files are summarized.
#### Step 2. Summarizing each process
To compare predicted actions to answer actions, each process paragraph in these
tables is summarized into answers to the four questions described above.
For the above predictions, the internal summary can be seen in the diagnostics
output like this:
```json
{
"prediction_summary": {
"process_id": 1167,
"inputs": { "participants": [ "atoms of hydrogen" ] },
"outputs": { "participants": [ "sunlight OR light" ] },
"conversions": null,
"moves": [
{
"participants": "sunlight OR light", "step_number": 5,
"location_before": "star", "location_after": "soil"
}
]
}
}
```
For the corresponding answer, the internal summary in the diagnostics output looks like this:
```json
{
"answer_summary": {
"process_id": 1167,
"inputs": { "participants": [ "atoms of hydrogen" ] },
"outputs": { "participants": [ "sunlight OR light" ] },
"conversions": [
{
"participants_destroyed": "atoms of hydrogen",
"participants_created": "sunlight OR light",
"location": "sun", "step_number": 3
}
],
"moves": [
{
"participants": "sunlight OR light", "step_number": 5,
"location_before": "sun", "location_after": "earth"
}
]
}
}
```
To read the code that summarizes process paragraphs from an action file, look
at the function `summarize_actions_file` and its uses.
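As a rough sketch of how the Inputs and Outputs questions fall out of a participant's location history (the real `summarize_actions_file` also derives conversions and moves), assume a map from participant to its locations before/after each sentence, with `-` meaning non-existence:
```python
# Illustrative only; not the evaluator's summarization code.
def inputs_and_outputs(locations: dict) -> tuple:
    inputs = [p for p, locs in locations.items() if locs[0] != "-" and locs[-1] == "-"]
    outputs = [p for p, locs in locations.items() if locs[0] == "-" and locs[-1] != "-"]
    return inputs, outputs

# For process 1167: atoms of hydrogen existed at first but not at the end
# (an input); sunlight OR light is the reverse (an output).
print(inputs_and_outputs({
    "atoms of hydrogen": ["sun", "sun", "sun", "-", "-", "-"],
    "sunlight OR light": ["-", "-", "-", "-", "sun", "earth"],
}))  # (['atoms of hydrogen'], ['sunlight OR light'])
```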
#### Step 3. Scoring each process
The summary of a prediction can be compared to the corresponding answer by
assigning precision and recall scores to each of these four questions.
Internally, this is represented in JSON like this:
```json
{
"score": {
"process_id": 1167,
"inputs": { "precision": 1, "recall": 1 },
"outputs": { "precision": 1, "recall": 1 },
"conversions": { "precision": 1, "recall": 0 },
"moves": { "precision": 0.3333333333333333, "recall": 0.3333333333333333 }
}
}
```
In this case, the precision and recall of **What are the Inputs?** and **What
are the Outputs?** is 1, because the prediction and answer both have the same
summarization, even though the predicted locations of `CREATE` and `DESTROY`
actions differ.
Since the prediction did not describe any conversions, but the answers do, the
recall for **What are the Conversions?** is 0.
Finally, while the prediction did correctly describe that the participant
`sunlight OR light` moved in sentence 5, the before and after locations are not
correct. Therefore, the precision and recall suffer for the question **What are
the Moves?**
To read the code that compares these summaries, look at the functions
`_score_inputs`, `_score_outputs`, `_score_conversions`, and `_score_moves` and
their use.
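As a rough intuition (not the evaluator's actual code), the Inputs and Outputs questions behave like set overlap between the predicted and gold participant lists:
```python
# Illustrative set-overlap precision/recall. The evaluator's _score_*
# functions are authoritative; in particular, the Moves score above gives
# partial credit (0.33 for one move with wrong locations), which this
# sketch does not model.
def precision_recall(predicted: set, gold: set) -> tuple:
    tp = len(predicted & gold)
    # An empty prediction scores precision 1, matching the Conversions
    # example above (precision 1, recall 0).
    precision = tp / len(predicted) if predicted else 1.0
    recall = tp / len(gold) if gold else 1.0
    return precision, recall

# Both sides summarize Inputs as {"atoms of hydrogen"}, so P = R = 1.
print(precision_recall({"atoms of hydrogen"}, {"atoms of hydrogen"}))
```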
**Note about location comparison:**
> Locations are normalized before comparison.
>
> Since you have to discover locations in the paragraph yourself when predicting
> actions for specific participants, your locations may differ slightly from the
> ones in the answers.
>
> For example, if the paragraph has `Ants walk on branches` and `The ant fell off
> a branch` then the location of the ant could be written as `branches`, `a
> branch`, or `branch`. Since your chosen span of the paragraph for this location
> may differ from the correct answer, a process of normalization resolves any of
> these variants to `branch`: first by lower casing the string, then removing
> starting articles like `a` and finally using the Porter stemming algorithm to
> settle on the final string.
>
> To see this in the code, look for the function `_compare_locations` and its uses.
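A stand-in for this normalization pipeline (lowercase, strip a leading article, Porter-stem), using NLTK's Porter stemmer as an assumed substitute for the evaluator's bundled one:
```python
# Illustrative only; _compare_locations and text/terms.py are authoritative,
# and the evaluator's article list is longer than the one assumed here.
from nltk.stem.porter import PorterStemmer  # assumption: NLTK as the stemmer

_ARTICLES = ("a ", "an ", "the ")
_stemmer = PorterStemmer()

def normalize_location(location: str) -> str:
    loc = location.lower().strip()
    for article in _ARTICLES:
        if loc.startswith(article):
            loc = loc[len(article):]
            break
    return _stemmer.stem(loc).strip()

# "branches", "a branch", and "branch" all normalize to "branch".
assert {normalize_location(s) for s in ("branches", "a branch", "branch")} == {"branch"}
```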
**Note about participant comparison:**
> Participants are not normalized before comparison.
>
> Your predictor may have selected a participant from the process paragraph that is
> not the one chosen for you in the answer. From the example above, if you predict
> an action on the participant `Ants`, but the answer action is on participant `ant`,
> your predicted action will not be matched.
>
> To see this in the code, look for the function `_compare_participants` and its uses.
>
> If your prediction refers to participants that are not in the answers, you'll see a
> report alerting you to the difference and the evaluation will abort. To see this in
> the code, look for the function `diff_participants` and its use. You should correct
> these differences by predicting actions only on the participants chosen for you. That is,
> your prediction's first three columns should match the first three columns of the answer
> file.
#### Step 4. Calculating an overall score
The above process scores are aggregated to an overall performance score.
To illustrate, consider the precision and recall scores (as computed above), in a table:
<html>
<table>
<tr>
<th rowspan="2">ProcessID</th>
<th colspan="4">Precision</th>
<th colspan="4">Recall</th>
</tr>
<tr>
<td>Inputs</td>
<td>Outputs</td>
<td>Conversions</td>
<td>Moves</td>
<td>Inputs</td>
<td>Outputs</td>
<td>Conversions</td>
<td>Moves</td>
</tr>
<tr>
<td>1</td>
<td>IP<sub>1</sub></td>
<td>OP<sub>1</sub></td>
<td>CP<sub>1</sub></td>
<td>MP<sub>1</sub></td>
<td>IR<sub>1</sub></td>
<td>OR<sub>1</sub></td>
<td>CR<sub>1</sub></td>
<td>MR<sub>1</sub></td>
</tr>
<tr>
<td>2</td>
<td>IP<sub>2</sub></td>
<td>OP<sub>2</sub></td>
<td>CP<sub>2</sub></td>
<td>MP<sub>2</sub></td>
<td>IR<sub>2</sub></td>
<td>OR<sub>2</sub></td>
<td>CR<sub>2</sub></td>
<td>MR<sub>2</sub></td>
</tr>
<tr>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
</tr>
</table>
</html>
Given these precision and recall values, the overall performance is calculated as three numbers:
* **Overall Precision P** = (Average of all IP + Average of all OP + Average of all CP + Average of all MP) / 4
* **Overall Recall R** = (Average of all IR + Average of all OR + Average of all CR + Average of all MR) / 4
* **Overall F1 score** = harmonic mean of P and R = 2 * (P * R) / (P + R)
To read the code that calculates these final scores, look at the class `Evaluation`.
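A compact restatement of these formulas (a sketch, not the `Evaluation` class itself), taking per-process score dicts shaped like the JSON shown in Step 3:
```python
# Macro-average each question's precision and recall over processes,
# average the four questions, then take the harmonic mean for F1.
from statistics import mean

QUESTIONS = ("inputs", "outputs", "conversions", "moves")

def overall_scores(process_scores: list) -> dict:
    p = mean(mean(s[q]["precision"] for s in process_scores) for q in QUESTIONS)
    r = mean(mean(s[q]["recall"] for s in process_scores) for q in QUESTIONS)
    f1 = 2 * p * r / (p + r) if p + r else 0.0
    return {"precision": p, "recall": r, "f1": f1}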
## Evaluator Development
### Testing
Run this script to run a comprehensive suite of tests:
```bash
./test.sh
```
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/README.md",
"repo_id": "ContextualSP",
"token_count": 6079
}
| 244 |
import unittest
from collections import OrderedDict
from process.action_file import ActionFile
from process.constants import NO_ACTION as NO_ACT
from process.constants import NO_LOCATION as NO_LOC, CREATE, DESTROY, MOVE
class TestSummarize(unittest.TestCase):
def test_load(self):
# Spot-check values loaded from an action file
actionfile = ActionFile.from_file('testfiles-0/answers.tsv')
# Process 514
        self.assertEqual(
OrderedDict([
('glacier', [NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, 'area', 'area']),
('mass', [NO_LOC, NO_LOC, NO_LOC, NO_LOC, NO_LOC, 'area', 'area', 'area']),
('snow', ['area', 'area', 'area', 'area', NO_LOC, NO_LOC, NO_LOC, NO_LOC]),
]),
actionfile.locations[514],
)
        self.assertEqual(
OrderedDict([
('glacier', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, NO_ACT, CREATE, NO_ACT]),
('mass', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, CREATE, NO_ACT, NO_ACT]),
('snow', [NO_ACT, NO_ACT, NO_ACT, DESTROY, NO_ACT, NO_ACT, NO_ACT]),
]),
actionfile.actions[514],
)
        self.assertEqual(7, actionfile.num_sentences[514])
# Process 540
        self.assertEqual(
OrderedDict([
('air', ['unk', 'unk', 'unk', 'bronchiole', 'alveolus', 'unk', 'unk', 'unk', 'unk', 'unk', 'unk']),
('carbon dioxide',
['unk', 'unk', 'unk', 'unk', 'unk', 'bloodstream', 'bloodstream', 'alveolus', 'bronchiole', 'lung',
'body']),
('oxygen', ['unk', 'unk', 'unk', 'unk', 'unk', 'bloodstream', 'unk', 'unk', 'unk', 'unk', 'unk']),
]),
actionfile.locations[540],
)
        self.assertEqual(
OrderedDict([
('air', [NO_ACT, NO_ACT, MOVE, MOVE, MOVE, NO_ACT, NO_ACT, NO_ACT, NO_ACT, NO_ACT]),
('carbon dioxide', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, MOVE, NO_ACT, MOVE, MOVE, MOVE, MOVE]),
('oxygen', [NO_ACT, NO_ACT, NO_ACT, NO_ACT, MOVE, MOVE, NO_ACT, NO_ACT, NO_ACT, NO_ACT]),
]),
actionfile.actions[540]
)
        self.assertEqual(10, actionfile.num_sentences[540])
if __name__ == '__main__':
unittest.main()
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/test_action_file.py/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/test_action_file.py",
"repo_id": "ContextualSP",
"token_count": 1222
}
| 245 |
from typing import List, Set
from text.stemmer import PorterStemmer
# Extract term sets from a phrase containing " AND " and " OR " tokens. A phrase like "foo OR bar AND fnord OR gnarf"
# is turned into a list of term sets like [{"foo", "bar"}, {"fnord", "gnarf"}] to match to another phrase's term sets.
def extract_termsets(phrase: str) -> List[Set[str]]:
outer = [p.strip() for p in phrase.split(" AND ")]
inner = [set(item.split(" OR ")) for item in outer]
return inner
# Extract term sets from a phrase containing " AND " and " OR " tokens. A phrase like "foo OR bar AND fnord OR gnarf"
# is turned into a list of term sets like [{"foo", "bar"}, {"fnord", "gnarf"}] to match to another phrase's term sets.
#
# This function normalizes each word.
def extract_termsets_with_normalization(phrase: str) -> List[Set[str]]:
outer = [p.strip() for p in phrase.split(" AND ")]
inner = [set(_normalize_words(item.split(" OR "))) for item in outer]
return inner
def terms_overlap(phrase1_terms: List[Set[str]], phrase2_terms: List[Set[str]]):
num = 0
for t1 in phrase1_terms:
for t2 in phrase2_terms:
if t1.intersection(t2):
num += 1
return num
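# Example (illustrative): extract_termsets("foo OR bar AND fnord OR gnarf")
# returns [{"foo", "bar"}, {"fnord", "gnarf"}], and terms_overlap counts how
# many term-set pairs across two phrases share at least one term.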
def _normalize_words(words: List[str]) -> List[str]:
stemmed = [] # type: List[str]
for w in words:
if not w or len(w.strip()) == 0:
return [""]
w_lower = w.lower()
# Remove leading articles from the phrase (e.g., the rays => rays).
articles = ["a", "an", "the", "your", "his", "their", "my", "another", "other", "this", "that"]
starting_article = next((article for article in articles if w_lower.startswith(_leading_word(article))), None)
if starting_article is not None:
w_lower = w_lower.replace(_leading_word(starting_article), "", 1)
# Porter stemmer: rays => ray
stemmed.append(PorterStemmer().stem(w_lower).strip())
return stemmed
def _leading_word(word):
return word + " "
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/text/terms.py/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/text/terms.py",
"repo_id": "ContextualSP",
"token_count": 746
}
| 246 |
# TRACIE
* [evaluator](evaluator/) is the program used by the AI2 Leaderboard to evaluate submitted predictions.
* `data` holds the publicly available train and test sets (with hidden test labels), along with example prediction files (for testing the evaluator).
## Example usage
To evaluate your predictions, run the following (on a toy prediction file that
guesses `entailment` for every train instance, called
`train_uniform_predictions.jsonl`).
```sh
% python3 evaluator/evaluator.py --question_answers data/train_uniform.jsonl --predictions data/train_uniform_predictions.jsonl --output metrics.json
% cat metrics.json
{"total_acc": 0.5, "start_acc": 0.5, "end_acc": 0.5, "story_em": 0.0}
```
For usage of the evaluator, see the [evaluator README](evaluator/).
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/tracie/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/tracie/README.md",
"repo_id": "ContextualSP",
"token_count": 235
}
| 247 |
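# Launches 50 shards of build_ver_train.py in parallel; each shard builds
# 10,000 verifier-training examples over a disjoint --start_index window
# (0, 10000, ..., 490000). The final shard runs in the foreground, so the
# script blocks until at least that shard finishes.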
python build_ver_train.py --num_examples 10000 --local_rank 0 --start_index 0 &
python build_ver_train.py --num_examples 10000 --local_rank 1 --start_index 10000 &
python build_ver_train.py --num_examples 10000 --local_rank 2 --start_index 20000 &
python build_ver_train.py --num_examples 10000 --local_rank 3 --start_index 30000 &
python build_ver_train.py --num_examples 10000 --local_rank 4 --start_index 40000 &
python build_ver_train.py --num_examples 10000 --local_rank 5 --start_index 50000 &
python build_ver_train.py --num_examples 10000 --local_rank 6 --start_index 60000 &
python build_ver_train.py --num_examples 10000 --local_rank 7 --start_index 70000 &
python build_ver_train.py --num_examples 10000 --local_rank 8 --start_index 80000 &
python build_ver_train.py --num_examples 10000 --local_rank 9 --start_index 90000 &
python build_ver_train.py --num_examples 10000 --local_rank 10 --start_index 100000 &
python build_ver_train.py --num_examples 10000 --local_rank 11 --start_index 110000 &
python build_ver_train.py --num_examples 10000 --local_rank 12 --start_index 120000 &
python build_ver_train.py --num_examples 10000 --local_rank 13 --start_index 130000 &
python build_ver_train.py --num_examples 10000 --local_rank 14 --start_index 140000 &
python build_ver_train.py --num_examples 10000 --local_rank 15 --start_index 150000 &
python build_ver_train.py --num_examples 10000 --local_rank 16 --start_index 160000 &
python build_ver_train.py --num_examples 10000 --local_rank 17 --start_index 170000 &
python build_ver_train.py --num_examples 10000 --local_rank 18 --start_index 180000 &
python build_ver_train.py --num_examples 10000 --local_rank 19 --start_index 190000 &
python build_ver_train.py --num_examples 10000 --local_rank 20 --start_index 200000 &
python build_ver_train.py --num_examples 10000 --local_rank 21 --start_index 210000 &
python build_ver_train.py --num_examples 10000 --local_rank 22 --start_index 220000 &
python build_ver_train.py --num_examples 10000 --local_rank 23 --start_index 230000 &
python build_ver_train.py --num_examples 10000 --local_rank 24 --start_index 240000 &
python build_ver_train.py --num_examples 10000 --local_rank 25 --start_index 250000 &
python build_ver_train.py --num_examples 10000 --local_rank 26 --start_index 260000 &
python build_ver_train.py --num_examples 10000 --local_rank 27 --start_index 270000 &
python build_ver_train.py --num_examples 10000 --local_rank 28 --start_index 280000 &
python build_ver_train.py --num_examples 10000 --local_rank 29 --start_index 290000 &
python build_ver_train.py --num_examples 10000 --local_rank 30 --start_index 300000 &
python build_ver_train.py --num_examples 10000 --local_rank 31 --start_index 310000 &
python build_ver_train.py --num_examples 10000 --local_rank 32 --start_index 320000 &
python build_ver_train.py --num_examples 10000 --local_rank 33 --start_index 330000 &
python build_ver_train.py --num_examples 10000 --local_rank 34 --start_index 340000 &
python build_ver_train.py --num_examples 10000 --local_rank 35 --start_index 350000 &
python build_ver_train.py --num_examples 10000 --local_rank 36 --start_index 360000 &
python build_ver_train.py --num_examples 10000 --local_rank 37 --start_index 370000 &
python build_ver_train.py --num_examples 10000 --local_rank 38 --start_index 380000 &
python build_ver_train.py --num_examples 10000 --local_rank 39 --start_index 390000 &
python build_ver_train.py --num_examples 10000 --local_rank 40 --start_index 400000 &
python build_ver_train.py --num_examples 10000 --local_rank 41 --start_index 410000 &
python build_ver_train.py --num_examples 10000 --local_rank 42 --start_index 420000 &
python build_ver_train.py --num_examples 10000 --local_rank 43 --start_index 430000 &
python build_ver_train.py --num_examples 10000 --local_rank 44 --start_index 440000 &
python build_ver_train.py --num_examples 10000 --local_rank 45 --start_index 450000 &
python build_ver_train.py --num_examples 10000 --local_rank 46 --start_index 460000 &
python build_ver_train.py --num_examples 10000 --local_rank 47 --start_index 470000 &
python build_ver_train.py --num_examples 10000 --local_rank 48 --start_index 480000 &
python build_ver_train.py --num_examples 10000 --local_rank 49 --start_index 490000
|
ContextualSP/logigan/corpus_construction/elastic_search/run_ver.sh/0
|
{
"file_path": "ContextualSP/logigan/corpus_construction/elastic_search/run_ver.sh",
"repo_id": "ContextualSP",
"token_count": 1350
}
| 248 |
from tqdm import tqdm, trange
from transformers import AutoModelForSequenceClassification , AutoTokenizer
import torch.nn as nn
import argparse
import copy
from copy import deepcopy
from torch.utils.data import Dataset, DataLoader, IterableDataset
import os, sys, time
import json
import string
import re
from collections import Counter
from datasets import Dataset, load_dataset, load_metric
import numpy as np
from transformers import Trainer, TrainingArguments
from transformers.trainer_utils import PredictionOutput, EvalLoopOutput
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
import pandas as pd
from torch.nn.functional import softmax
import datasets
datasets.set_caching_enabled(False)
from parameters16g_es_corpusb import *
def generate_ver_train_iter(pred):
logits = pred.predictions
    pred_prob = np.exp(logits) / np.sum(np.exp(logits), axis=-1)[:, None].repeat(3, axis=-1)  # softmax over the 3 NLI classes
# 0 as entailment, 1 & 2 as non-ent
ent_prob = pred_prob[:,0, None]
non_ent_prob = pred_prob[:,1, None] + pred_prob[:,2, None]
prob = np.concatenate([ent_prob, non_ent_prob], axis=-1)
pred_labels = np.argmax(prob, axis=-1)
if trainer.args.local_rank in [-1,0]:
with open(ver_train_iter_path, "w") as f:
inputs = infer_dataset["input"]
outputs = infer_dataset["conclusion"]
preds = infer_dataset["gen_conclusion"]
for in_, out_, pred, label in zip(inputs, outputs, preds, pred_labels):
                example1 = {"input": in_, "conclusions": [pred, out_], "is_gold": [1 - int(label), 1]}  # generated conclusion counts as gold only if predicted entailed (label 0)
# example2 = {"input": in_, "conclusion": out_, "is_gold": 1} # gold
json.dump(example1, f); f.write("\n")
# json.dump(example2, f); f.write("\n")
print(len(inputs))
print(f"\n\n\nSuccess: ver_train_iter.jsonl has been created at {ver_train_iter_path}.\n\n")
raw_datasets = load_dataset('json', data_files={"infer": unlabeled_ver_train_iter_path}, download_mode='force_redownload')  # renamed to avoid shadowing the imported `datasets` module
### model setting 1
tokenizer = AutoTokenizer.from_pretrained("ynie/albert-xxlarge-v2-snli_mnli_fever_anli_R1_R2_R3-nli")
infer_dataset = raw_datasets["infer"]
encoder_max_length = 512
decoder_max_length = 128
def process_data_to_model_inputs(batch):
inputs1 = batch["conclusion"]
inputs2 = batch["gen_conclusion"]
inputs = tokenizer(inputs1, inputs2, truncation=True, padding="max_length", max_length=encoder_max_length)
batch["input_ids"] = inputs.input_ids
batch["attention_mask"] = inputs.attention_mask
return batch
## map train data
workers = 8
infer_dataset = infer_dataset.map(
process_data_to_model_inputs,
batched=True,
num_proc=workers,
batch_size=16,
load_from_cache_file=False, # necessary
)
infer_dataset.set_format(type="torch", columns=["input_ids", "attention_mask"])
training_args = TrainingArguments(
per_device_train_batch_size=1,
per_device_eval_batch_size=nli_per_device_eval_batch_size,
save_strategy="no",
output_dir=nli_output_dir
)
# model = BartForConditionalGeneration.from_pretrained("./pbart_checkpoints")
model = AutoModelForSequenceClassification.from_pretrained("ynie/albert-xxlarge-v2-snli_mnli_fever_anli_R1_R2_R3-nli")
# pretrained bm 3, lp 3, nr 2
trainer = Trainer(
model=model,
tokenizer=tokenizer,
args=training_args
)
if __name__ == "__main__":
# trainer.train()
# trainer.predict(valid_dataset, ignore_keys=["encoder_last_hidden_state"])
pred = trainer.predict(infer_dataset, ignore_keys=["encoder_last_hidden_state"])
generate_ver_train_iter(pred)
|
ContextualSP/logigan/pre-training/nli_es.py/0
|
{
"file_path": "ContextualSP/logigan/pre-training/nli_es.py",
"repo_id": "ContextualSP",
"token_count": 1489
}
| 249 |
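# Reassembles sub-SPARQL classification predictions (from the ESIM mask model)
# into one SPARQL program per natural-language query, then reports exact-match
# accuracy against the gold queries.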
import json
from collections import defaultdict
import re
FILTER_PRED = ["people.person.spouse_s/ns:people.marriage.spouse|ns:fictional_universe.fictional_character.married_to/ns:fictional_universe.marriage_of_fictional_characters.spouses",
"people.person.sibling_s/ns:people.sibling_relationship.sibling|ns:fictional_universe.fictional_character.siblings/ns:fictional_universe.sibling_relationship_of_fictional_characters.siblings"]
split = "mcd1"
dir = f"./data/{split}/"
test_file = f"{dir}/test/test_mask_predict_classification.csv"
test_sparql = open(f"{dir}/test/test_target.txt")
test_query = open(f"{dir}/test/test_encode.txt")
test_mask_file = f"{dir}/test/test_mask_predict_mapping.csv"
test_predict = json.loads(open(f"./output/esim-mask-{split}-predict").readlines()[0])
test_predict_prob = json.loads(open(f"./output/esim-mask-{split}-predict.prob").readlines()[0])
test_data = [i.strip().split('\t') for i in open(test_file).readlines()[1:]]
test_mask_data = [i.strip().split('\t') for i in open(test_mask_file).readlines()[1:]]
token_pred_dict_file = open("./data/phrase_table.pred").readlines()
token_pred_dict = defaultdict(list)
for i in token_pred_dict_file:
i = eval(i.strip())[0]
token_pred_dict[i[0]].append(i[1])
assert len(test_data) == len(test_mask_data) == len(test_predict) == len(test_predict_prob), \
    f"length mismatch: {len(test_data)} rows, {len(test_mask_data)} mappings, {len(test_predict)} predictions, {len(test_predict_prob)} probabilities"
golden_dict = {i.strip():j.strip() for i, j in zip(test_query, test_sparql)}
pred_dict = defaultdict(list)
pos_error = 0
false_error = 0
right = 0
pred_1 = 0
pos_cnt = 0
false_errors, predicate_set, token_state = defaultdict(list), [], []
cur_query = ''
for idx, info in enumerate(zip(test_data, test_predict, test_predict_prob)):
item, pred_v, pred_prob = info
if item[0] != cur_query:
if len(predicate_set):
for token, preds in predicate_set.items():
if token_state[token] == 0 and len(preds):
                    max_idx, max_item, max_score = -1, '', float('-inf')  # matches the (index, row, probability) tuples stored in false_errors
for pred in preds:
if len(false_errors[pred]):
false_errors[pred] = sorted(false_errors[pred], key = lambda k:k[2], reverse = True)
if false_errors[pred][0][2] > max_score:
max_pred = pred
max_idx, max_item , max_score = false_errors[pred][0]
if max_idx != -1:
false_errors[max_pred].pop(0)
simplified_query = max_item[0].strip()
original_query = test_mask_data[max_idx][0]
mask_entities = eval(test_mask_data[max_idx][-1])
# print("mask mask_entities:", mask_entities)
assert isinstance(mask_entities, dict)
for key, v in mask_entities.items():
if key in max_item[1]:
for vv in v:
pred_dict[original_query] += max_item[1].strip().replace(key, vv).split(" . ")
pred_dict[original_query] += max_item[1].strip().split(" . ")
predicate_set = {token:token_pred_dict[token] for token in item[0].split()}
predicate2token = defaultdict(list)
for k,v in predicate_set.items():
for vv in v:
predicate2token[vv].append(k)
token_state = {token:0 for token in item[0].split()}
cur_query = item[0]
false_errors = defaultdict(list)
# print("="*50)
if pred_v == 1 and item[-1].strip() == 'True':
# if item[1].strip().startswith("M"):
# print("pred False", item)
if len(item[1].split(" . ")) == 1:
a1, r, a2 = item[1].split()
if re.match(r'\?x[0-9]*|M[0-9]*', a1) and re.match(r'\?x[0-9]*|M[0-9]*', a2) and r!='is':
v= r
else:
v = f"{r} {a2}"
false_errors[v].append((idx, item, pred_prob[0]))
false_error += 1
pass
elif int(pred_v) == 0 and item[-1].strip() == 'False':
pos_error += 1
pass
else:
right += 1
if int(pred_v) == 0:
for triple in item[1].split(" . "):
a1, r, a2 = triple.split()
if re.match(r'\?x[0-9]*|M[0-9]*', a1) and re.match(r'\?x[0-9]*|M[0-9]*', a2) and r!='is':
token_key= r
else:
token_key = f"{r} {a2}"
for token in predicate2token[token_key]:
token_state[token] = 1
if 'executive' not in token_key:
break
simplified_query = item[0].strip()
original_query = test_mask_data[idx][0]
mask_entities = eval(test_mask_data[idx][-1])
assert isinstance(mask_entities, dict)
for key, v in mask_entities.items():
if key in item[1]:
for vv in v:
pred_dict[original_query] += item[1].strip().replace(key, vv).split(" . ")
pred_dict[original_query] += item[1].strip().split(" . ")
pred_1 += 1
if item[-1].strip() == 'True':
pos_cnt += 1
# print(right / len(test_data))
# print(pos_error / (len(test_data) - pos_cnt))
# print(false_error / pos_cnt)
# print(false_error, pos_error)
# print("pred_dict:",len(pred_dict))
# print("golden_dict:",len(golden_dict))
cnt = 0
for item, val in golden_dict.items():
sub_sparqls = list(set(pred_dict.get(item, [])))
sub_sparqls.sort()
filter_sparqls = []
for sub_sparql in sub_sparqls:
a1, r, a2 = sub_sparql.split()
if r in FILTER_PRED:
filter_sparqls.append(f"FILTER ( {a1} != {a2} )")
filter_sparqls.sort()
if " . ".join(sub_sparqls+filter_sparqls) == val:
cnt += 1
else:
if len(set(sub_sparqls + filter_sparqls) - set(val.split(' . '))) == 0 and len(set(val.split(' . ')) - set(sub_sparqls + filter_sparqls)) == 0:
print(f"\n\nquery:{item}\n\nsparqls-golden:{set(sub_sparqls + filter_sparqls) - set(val.split(' . '))}\n\ngolden-sparqls:{set(val.split(' . ')) - set(sub_sparqls + filter_sparqls)}\n\nsub-sparql:{sub_sparqls + filter_sparqls}\n\ngolden:{val}")
print("accuracy:", cnt / len(golden_dict))
|
ContextualSP/poset_decoding/evaluate.py/0
|
{
"file_path": "ContextualSP/poset_decoding/evaluate.py",
"repo_id": "ContextualSP",
"token_count": 2453
}
| 250 |
matchzoo
========
.. toctree::
:maxdepth: 4
matchzoo
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/docs/source/modules.rst/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/docs/source/modules.rst",
"repo_id": "ContextualSP",
"token_count": 27
}
| 251 |
from matchzoo.engine.base_callback import BaseCallback
class LambdaCallback(BaseCallback):
"""
LambdaCallback. Just a shorthand for creating a callback class.
See :class:`matchzoo.engine.base_callback.BaseCallback` for more details.
Example:
>>> import matchzoo as mz
>>> from matchzoo.dataloader.callbacks import LambdaCallback
>>> data = mz.datasets.toy.load_data()
>>> batch_func = lambda x: print(type(x))
>>> unpack_func = lambda x, y: print(type(x), type(y))
>>> callback = LambdaCallback(on_batch_data_pack=batch_func,
... on_batch_unpacked=unpack_func)
>>> dataset = mz.dataloader.Dataset(
... data, callbacks=[callback])
>>> _ = dataset[0]
<class 'matchzoo.data_pack.data_pack.DataPack'>
<class 'dict'> <class 'numpy.ndarray'>
"""
def __init__(self, on_batch_data_pack=None, on_batch_unpacked=None):
"""Init."""
self._on_batch_unpacked = on_batch_unpacked
self._on_batch_data_pack = on_batch_data_pack
def on_batch_data_pack(self, data_pack):
"""`on_batch_data_pack`."""
if self._on_batch_data_pack:
self._on_batch_data_pack(data_pack)
def on_batch_unpacked(self, x, y):
"""`on_batch_unpacked`."""
if self._on_batch_unpacked:
self._on_batch_unpacked(x, y)
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/dataloader/callbacks/lambda_callback.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/dataloader/callbacks/lambda_callback.py",
"repo_id": "ContextualSP",
"token_count": 634
}
| 252 |
"""FastText embedding data loader."""
from pathlib import Path
import matchzoo as mz
_fasttext_embedding_url = "https://dl.fbaipublicfiles.com/fasttext/vectors" \
"-wiki/wiki.{}.vec"
def load_fasttext_embedding(language: str = 'en') -> mz.embedding.Embedding:
"""
Return the pretrained fasttext embedding.
:param language: the language of embedding. Supported language can be
referred to "https://github.com/facebookresearch/fastText/blob/master"
"/docs/pretrained-vectors.md"
:return: The :class:`mz.embedding.Embedding` object.
"""
file_name = _fasttext_embedding_url.split('/')[-1].format(language)
file_path = (Path(mz.USER_DATA_DIR) / 'fasttext').joinpath(file_name)
if not file_path.exists():
mz.utils.get_file(file_name,
_fasttext_embedding_url.format(language),
extract=False,
cache_dir=mz.USER_DATA_DIR,
cache_subdir='fasttext')
return mz.embedding.load_from_file(file_path=str(file_path),
mode='fasttext')
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/embeddings/load_fasttext_embedding.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/embeddings/load_fasttext_embedding.py",
"repo_id": "ContextualSP",
"token_count": 541
}
| 253 |
"""Base callback."""
import abc
import numpy as np
import matchzoo as mz
class BaseCallback(abc.ABC):
"""
DataGenerator callback base class.
To build your own callbacks, inherit `mz.data_generator.callbacks.Callback`
and overrides corresponding methods.
A batch is processed in the following way:
- slice data pack based on batch index
- handle `on_batch_data_pack` callbacks
- unpack data pack into x, y
    - handle `on_batch_unpacked` callbacks
- return x, y
"""
def on_batch_data_pack(self, data_pack: mz.DataPack):
"""
`on_batch_data_pack`.
:param data_pack: a sliced DataPack before unpacking.
"""
@abc.abstractmethod
def on_batch_unpacked(self, x: dict, y: np.ndarray):
"""
`on_batch_unpacked`.
:param x: unpacked x.
:param y: unpacked y.
"""
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/engine/base_callback.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/engine/base_callback.py",
"repo_id": "ContextualSP",
"token_count": 357
}
| 254 |
"""Mean average precision metric for ranking."""
import numpy as np
from matchzoo.engine.base_metric import (
BaseMetric, sort_and_couple, RankingMetric
)
class MeanAveragePrecision(RankingMetric):
"""Mean average precision metric."""
ALIAS = ['mean_average_precision', 'map']
def __init__(self, threshold: float = 0.):
"""
:class:`MeanAveragePrecision` constructor.
:param threshold: The threshold of relevance degree.
"""
self._threshold = threshold
def __repr__(self):
""":return: Formated string representation of the metric."""
return f"{self.ALIAS[0]}({self._threshold})"
def __call__(self, y_true: np.array, y_pred: np.array) -> float:
"""
Calculate mean average precision.
Example:
>>> y_true = [0, 1, 0, 0]
>>> y_pred = [0.1, 0.6, 0.2, 0.3]
>>> MeanAveragePrecision()(y_true, y_pred)
1.0
:param y_true: The ground true label of each document.
:param y_pred: The predicted scores of each document.
:return: Mean average precision.
"""
result = 0.
pos = 0
coupled_pair = sort_and_couple(y_true, y_pred)
for idx, (label, score) in enumerate(coupled_pair):
if label > self._threshold:
pos += 1.
result += pos / (idx + 1.)
if pos == 0:
return 0.
else:
return result / pos
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/mean_average_precision.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/mean_average_precision.py",
"repo_id": "ContextualSP",
"token_count": 670
}
| 255 |
"""An implementation of DRMMTKS Model."""
import typing
import torch
import torch.nn as nn
import torch.nn.functional as F
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.base_callback import BaseCallback
from matchzoo.engine.param import Param
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine import hyper_spaces
from matchzoo.dataloader import callbacks
from matchzoo.modules import Attention
class DRMMTKS(BaseModel):
"""
DRMMTKS Model.
Examples:
>>> model = DRMMTKS()
>>> model.params['top_k'] = 10
>>> model.params['mlp_num_layers'] = 1
>>> model.params['mlp_num_units'] = 5
>>> model.params['mlp_num_fan_out'] = 1
>>> model.params['mlp_activation_func'] = 'tanh'
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(
with_embedding=True,
with_multi_layer_perceptron=True
)
params.add(Param(name='mask_value', value=0,
desc="The value to be masked from inputs."))
params.add(Param(
'top_k', value=10,
hyper_space=hyper_spaces.quniform(low=2, high=100),
desc="Size of top-k pooling layer."
))
params['mlp_num_fan_out'] = 1
return params
@classmethod
def get_default_padding_callback(
cls,
fixed_length_left: int = 10,
fixed_length_right: int = 100,
pad_word_value: typing.Union[int, str] = 0,
pad_word_mode: str = 'pre',
with_ngram: bool = False,
fixed_ngram_length: int = None,
pad_ngram_value: typing.Union[int, str] = 0,
pad_ngram_mode: str = 'pre'
) -> BaseCallback:
"""
Model default padding callback.
The padding callback's on_batch_unpacked would pad a batch of data to
a fixed length.
:return: Default padding callback.
"""
return callbacks.BasicPadding(
fixed_length_left=fixed_length_left,
fixed_length_right=fixed_length_right,
pad_word_value=pad_word_value,
pad_word_mode=pad_word_mode,
with_ngram=with_ngram,
fixed_ngram_length=fixed_ngram_length,
pad_ngram_value=pad_ngram_value,
pad_ngram_mode=pad_ngram_mode
)
def build(self):
"""Build model structure."""
self.embedding = self._make_default_embedding_layer()
self.attention = Attention(
input_size=self._params['embedding_output_dim']
)
self.mlp = self._make_multi_layer_perceptron_layer(
self._params['top_k']
)
self.out = self._make_output_layer(1)
def forward(self, inputs):
"""Forward."""
# Scalar dimensions referenced here:
# B = batch size (number of sequences)
# D = embedding size
# L = `input_left` sequence length
# R = `input_right` sequence length
# K = size of top-k
# Left input and right input.
# shape = [B, L]
# shape = [B, R]
query, doc = inputs['text_left'], inputs['text_right']
# shape = [B, L]
mask_query = (query == self._params['mask_value'])
# Process left input.
# shape = [B, L, D]
embed_query = self.embedding(query.long())
# shape = [B, R, D]
embed_doc = self.embedding(doc.long())
# Matching histogram of top-k
# shape = [B, L, R]
matching_matrix = torch.einsum(
'bld,brd->blr',
F.normalize(embed_query, p=2, dim=-1),
F.normalize(embed_doc, p=2, dim=-1)
)
# shape = [B, L, K]
matching_topk = torch.topk(
matching_matrix,
k=self._params['top_k'],
dim=-1,
sorted=True
)[0]
# shape = [B, L]
attention_probs = self.attention(embed_query, mask_query)
# shape = [B, L]
dense_output = self.mlp(matching_topk).squeeze(dim=-1)
x = torch.einsum('bl,bl->b', dense_output, attention_probs)
out = self.out(x.unsqueeze(dim=-1))
return out
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/drmmtks.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/drmmtks.py",
"repo_id": "ContextualSP",
"token_count": 2065
}
| 256 |
"""DenseNet module."""
import typing
import torch
import torch.nn as nn
class DenseBlock(nn.Module):
"""Dense block of DenseNet."""
def __init__(
self,
in_channels,
growth_rate: int = 20,
kernel_size: tuple = (2, 2),
layers_per_dense_block: int = 3
):
"""Init."""
super().__init__()
dense_block = []
for _ in range(layers_per_dense_block):
conv_block = self._make_conv_block(in_channels, growth_rate, kernel_size)
dense_block.append(conv_block)
in_channels += growth_rate
self._dense_block = nn.ModuleList(dense_block)
def forward(self, x):
"""Forward."""
for layer in self._dense_block:
conv_out = layer(x)
x = torch.cat([x, conv_out], dim=1)
return x
@classmethod
def _make_conv_block(
cls,
in_channels: int,
out_channels: int,
kernel_size: tuple
) -> nn.Module:
"""Make conv block."""
return nn.Sequential(
nn.ConstantPad2d(
(0, kernel_size[1] - 1, 0, kernel_size[0] - 1), 0
),
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size
),
nn.ReLU()
)
class DenseNet(nn.Module):
"""
DenseNet module.
:param in_channels: Feature size of input.
:param nb_dense_blocks: The number of blocks in densenet.
:param layers_per_dense_block: The number of convolution layers in dense block.
:param growth_rate: The filter size of each convolution layer in dense block.
:param transition_scale_down_ratio: The channel scale down ratio of the convolution
layer in transition block.
:param conv_kernel_size: The kernel size of convolution layer in dense block.
:param pool_kernel_size: The kernel size of pooling layer in transition block.
"""
def __init__(
self,
in_channels,
nb_dense_blocks: int = 3,
layers_per_dense_block: int = 3,
growth_rate: int = 10,
transition_scale_down_ratio: float = 0.5,
conv_kernel_size: tuple = (2, 2),
pool_kernel_size: tuple = (2, 2),
):
"""Init."""
super().__init__()
dense_blocks = []
transition_blocks = []
for _ in range(nb_dense_blocks):
dense_block = DenseBlock(
in_channels, growth_rate, conv_kernel_size, layers_per_dense_block)
in_channels += layers_per_dense_block * growth_rate
dense_blocks.append(dense_block)
transition_block = self._make_transition_block(
in_channels, transition_scale_down_ratio, pool_kernel_size)
in_channels = int(in_channels * transition_scale_down_ratio)
transition_blocks.append(transition_block)
self._dense_blocks = nn.ModuleList(dense_blocks)
self._transition_blocks = nn.ModuleList(transition_blocks)
self._out_channels = in_channels
@property
def out_channels(self) -> int:
"""`out_channels` getter."""
return self._out_channels
def forward(self, x):
"""Forward."""
for dense_block, trans_block in zip(self._dense_blocks, self._transition_blocks):
x = dense_block(x)
x = trans_block(x)
return x
@classmethod
def _make_transition_block(
cls,
in_channels: int,
transition_scale_down_ratio: float,
pool_kernel_size: tuple
) -> nn.Module:
return nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=int(in_channels * transition_scale_down_ratio),
kernel_size=1
),
nn.MaxPool2d(kernel_size=pool_kernel_size, stride=pool_kernel_size)
)
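# Worked example (illustrative): with the default arguments and in_channels=4,
# channels evolve 4 -> 34 (dense, +3*10) -> 17 (transition, *0.5) -> 47 -> 23
# -> 53 -> 26, so out_channels == 26 and a [B, 4, H, W] input becomes
# [B, 26, H//8, W//8] after the three 2x2 max pools (H, W divisible by 8).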
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/modules/dense_net.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/modules/dense_net.py",
"repo_id": "ContextualSP",
"token_count": 1889
}
| 257 |
from .unit import Unit
from .digit_removal import DigitRemoval
from .frequency_filter import FrequencyFilter
from .lemmatization import Lemmatization
from .lowercase import Lowercase
from .matching_histogram import MatchingHistogram
from .ngram_letter import NgramLetter
from .punc_removal import PuncRemoval
from .stateful_unit import StatefulUnit
from .stemming import Stemming
from .stop_removal import StopRemoval
from .tokenize import Tokenize
from .vocabulary import Vocabulary
from .word_hashing import WordHashing
from .character_index import CharacterIndex
from .word_exact_match import WordExactMatch
from .truncated_length import TruncatedLength
def list_available() -> list:
from matchzoo.utils import list_recursive_concrete_subclasses
return list_recursive_concrete_subclasses(Unit)
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/__init__.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/__init__.py",
"repo_id": "ContextualSP",
"token_count": 226
}
| 258 |
import numpy as np
from .unit import Unit
class WordExactMatch(Unit):
"""
WordExactUnit Class.
Process unit to get a binary match list of two word index lists. The
word index list is the word representation of a text.
Examples:
>>> import pandas
>>> input_ = pandas.DataFrame({
... 'text_left':[[1, 2, 3],[4, 5, 7, 9]],
... 'text_right':[[5, 3, 2, 7],[2, 3, 5]]}
... )
>>> left_word_exact_match = WordExactMatch(
... match='text_left', to_match='text_right'
... )
>>> left_out = input_.apply(left_word_exact_match.transform, axis=1)
>>> left_out[0]
[0, 1, 1]
>>> left_out[1]
[0, 1, 0, 0]
>>> right_word_exact_match = WordExactMatch(
... match='text_right', to_match='text_left'
... )
>>> right_out = input_.apply(right_word_exact_match.transform, axis=1)
>>> right_out[0]
[0, 1, 1, 0]
>>> right_out[1]
[0, 0, 1]
"""
def __init__(
self,
match: str,
to_match: str
):
"""
Class initialization.
:param match: the 'match' column name.
:param to_match: the 'to_match' column name.
"""
self._match = match
self._to_match = to_match
def transform(self, input_) -> list:
"""
Transform two word index lists into a binary match list.
:param input_: a dataframe include 'match' column and
'to_match' column.
:return: a binary match result list of two word index lists.
"""
match_binary = []
for i in range(len(input_[self._match])):
if input_[self._match][i] in set(input_[self._to_match]):
match_binary.append(1)
else:
match_binary.append(0)
return match_binary
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/word_exact_match.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/word_exact_match.py",
"repo_id": "ContextualSP",
"token_count": 903
}
| 259 |
"""Matchzoo version file."""
__version__ = '1.1.1'
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/version.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/version.py",
"repo_id": "ContextualSP",
"token_count": 22
}
| 260 |