"""
Source code for osm_clipper
Copyright (C) 2020 <NAME>. All versions released under the MIT license.
"""
import os
import numpy
import pandas
import pygeos
import geopandas
import urllib.request
import zipfile
from tqdm import tqdm
from multiprocessing import Pool,cpu_count
from shapely.geometry import MultiPolygon
from shapely.wkb import loads
from geopy.distance import geodesic
def planet_osm(data_path):
"""
This function will download the planet file from the OSM servers.
"""
osm_path_in = os.path.join(data_path,'planet_osm')
    # create directory to save the planet osm file if that directory does not exist yet.
if not os.path.exists(osm_path_in):
os.makedirs(osm_path_in)
# if planet file is not downloaded yet, download it.
if 'planet-latest.osm.pbf' not in os.listdir(osm_path_in):
url = 'https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf'
urllib.request.urlretrieve(url, os.path.join(osm_path_in,'planet-latest.osm.pbf'))
else:
print('Planet file is already downloaded')
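# A minimal usage sketch (hedged): './data' below is just an example base directory.
#
#   planet_osm('./data')
#
# downloads planet-latest.osm.pbf into './data/planet_osm' (a very large download).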
def country_osm(country,data_path):
"""
    This function will download the country file from the GeoFabrik servers.
    Note: some countries are not available on GeoFabrik. In that case you will have to
    download the planet file and clip the country from there.
"""
osm_path_in = os.path.join(data_path,'country_osm')
    # create directory to save the country osm file if that directory does not exist yet.
    if not os.path.exists(osm_path_in):
        os.makedirs(osm_path_in)
    # if the country file is not downloaded yet, download it.
try:
if '{}.osm.pbf'.format(country) not in os.listdir(osm_path_in):
url = 'http://download.geofabrik.de/{}/{}-latest.osm.pbf'.format(country_list()[country][0],country_list()[country][1])
urllib.request.urlretrieve(url, os.path.join(osm_path_in,'{}.osm.pbf'.format(country)))
else:
print('Country file is already downloaded')
except:
print('{} is not available on GeoFabrik, you will have to clip it yourself from the planet file!'.format(country))
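# A minimal usage sketch (hedged; 'NLD' and './data' are example values):
#
#   country_osm('NLD','./data')
#
# resolves, via country_list(), to http://download.geofabrik.de/europe/netherlands-latest.osm.pbf
# and saves it as './data/country_osm/NLD.osm.pbf'.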
def gadm36_planet(data_path):
"""
This function will download the GADM36 file.
"""
gadm_path_in = os.path.join(data_path,'GADM36')
    # create directory to save the GADM file if that directory does not exist yet.
if not os.path.exists(gadm_path_in):
os.makedirs(gadm_path_in)
# if GADM file is not downloaded yet, download it.
if 'gadm36_levels.gpkg' not in os.listdir(gadm_path_in):
url = 'https://biogeo.ucdavis.edu/data/gadm3.6/gadm36_levels_gpkg.zip'
urllib.request.urlretrieve(url, os.path.join(gadm_path_in,'gadm36_levels_gpkg.zip'))
with zipfile.ZipFile(os.path.join(gadm_path_in,'gadm36_levels_gpkg.zip'), 'r') as zip_ref:
zip_ref.extractall(gadm_path_in)
os.remove(os.path.join(gadm_path_in,'gadm36_levels_gpkg.zip'))
else:
print('GADM36 planet file is already downloaded')
def gadm36_country(country_code,data_path):
"""
This function will download the GADM36 file.
"""
gadm_path_in = os.path.join(data_path,'GADM36')
    # create directory to save the GADM file if that directory does not exist yet.
if not os.path.exists(gadm_path_in):
os.makedirs(gadm_path_in)
# if GADM file is not downloaded yet, download it.
if 'gadm36_{}.gpkg'.format(country_code) not in os.listdir(gadm_path_in):
url = 'https://biogeo.ucdavis.edu/data/gadm3.6/gpkg/gadm36_{}_gpkg.zip'.format(country_code)
urllib.request.urlretrieve(url, os.path.join(gadm_path_in,'gadm36_{}_gpkg.zip').format(country_code))
with zipfile.ZipFile(os.path.join(gadm_path_in,'gadm36_{}_gpkg.zip').format(country_code), 'r') as zip_ref:
zip_ref.extractall(gadm_path_in)
os.remove(os.path.join(gadm_path_in,'gadm36_{}_gpkg.zip').format(country_code))
else:
print('GADM36 country file is already downloaded')
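# Usage sketch (hedged): download the global GADM36 layers and a single-country GADM file.
# 'NLD' and './data' are example values.
#
#   gadm36_planet('./data')             # -> ./data/GADM36/gadm36_levels.gpkg
#   gadm36_country('NLD','./data')      # -> ./data/GADM36/gadm36_NLD.gpkg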
def global_shapefiles(data_path,regionalized=False,assigned_level=1):
"""
    This function will simplify shapes and add the necessary columns, to make further processing quicker.
    For now, we will make use of the latest GADM data, split by level: https://gadm.org/download_world.html
    Optional Arguments:
        *regionalized* : Default is **False**. Set to **True** to also create the global_regions.gpkg file.
        *assigned_level* : Default is **1**. GADM administrative level to use when *regionalized* is set to **True**.
"""
gadm_path = os.path.join(data_path,'GADM36','gadm36_levels.gpkg')
cleaned_shapes_path = os.path.join(data_path,'cleaned_shapes')
if not os.path.exists(cleaned_shapes_path):
os.makedirs(cleaned_shapes_path)
# path to country GADM file
if regionalized == False:
# load country file
gadm_level0 = pandas.DataFrame(geopandas.read_file(gadm_path,layer='level0'))
#convert to pygeos
tqdm.pandas(desc='Convert geometries to pygeos')
gadm_level0['geometry'] = gadm_level0.geometry.progress_apply(lambda x: pygeos.from_shapely(x))
# remove antarctica, no roads there anyways
gadm_level0 = gadm_level0.loc[~gadm_level0['NAME_0'].isin(['Antarctica'])]
# remove tiny shapes to reduce size substantially
tqdm.pandas(desc='Remove tiny shapes')
gadm_level0['geometry'] = gadm_level0.progress_apply(remove_tiny_shapes,axis=1)
#simplify geometry
tqdm.pandas(desc='Simplify geometry')
gadm_level0.geometry = gadm_level0.geometry.progress_apply(lambda x: pygeos.simplify(
pygeos.buffer(
pygeos.simplify(
x,tolerance = 0.005, preserve_topology=True),0.01),tolerance = 0.005, preserve_topology=True))
#save to new country file
glob_ctry_path = os.path.join(cleaned_shapes_path,'global_countries.gpkg')
tqdm.pandas(desc='Convert geometries back to shapely')
gadm_level0.geometry = gadm_level0.geometry.progress_apply(lambda x: loads(pygeos.to_wkb(x)))
geopandas.GeoDataFrame(gadm_level0).to_file(glob_ctry_path,layer='level0', driver="GPKG")
else:
# this is dependent on the country file, so check whether that one is already created:
glob_ctry_path = os.path.join(cleaned_shapes_path,'global_countries.gpkg')
if os.path.exists(glob_ctry_path):
gadm_level0 = geopandas.read_file(os.path.join(glob_ctry_path),layer='level0')
else:
print('ERROR: You need to create the country file first')
return None
# load region file
gadm_level_x = pandas.DataFrame(geopandas.read_file(gadm_path,layer='level{}'.format(assigned_level)))
#convert to pygeos
tqdm.pandas(desc='Convert geometries to pygeos')
gadm_level_x['geometry'] = gadm_level_x.geometry.progress_apply(lambda x: pygeos.from_shapely(x))
# remove tiny shapes to reduce size substantially
tqdm.pandas(desc='Remove tiny shapes')
gadm_level_x['geometry'] = gadm_level_x.progress_apply(remove_tiny_shapes,axis=1)
#simplify geometry
tqdm.pandas(desc='Simplify geometry')
gadm_level_x.geometry = gadm_level_x.geometry.progress_apply(lambda x: pygeos.simplify(
pygeos.buffer(
pygeos.simplify(
x,tolerance = 0.005, preserve_topology=True),0.01),tolerance = 0.005, preserve_topology=True))
# add some missing geometries from countries with no subregions
get_missing_countries = list(set(list(gadm_level0.GID_0.unique())).difference(list(gadm_level_x.GID_0.unique())))
        #TO DO: GID_2 and lower tiers should first be filled by a tier above, rather than by the country file
        mis_country = gadm_level0.loc[gadm_level0['GID_0'].isin(get_missing_countries)]
if assigned_level==1:
mis_country['GID_1'] = mis_country['GID_0']+'.'+str(0)+'_'+str(1)
elif assigned_level==2:
mis_country['GID_2'] = mis_country['GID_0']+'.'+str(0)+'.'+str(0)+'_'+str(1)
elif assigned_level==3:
mis_country['GID_3'] = mis_country['GID_0']+'.'+str(0)+'.'+str(0)+'.'+str(0)+'_'+str(1)
elif assigned_level==4:
mis_country['GID_4'] = mis_country['GID_0']+'.'+str(0)+'.'+str(0)+'.'+str(0)+'.'+str(0)+'_'+str(1)
elif assigned_level==5:
mis_country['GID_5'] = mis_country['GID_0']+'.'+str(0)+'.'+str(0)+'.'+str(0)+'.'+str(0)+'.'+str(0)+'_'+str(1)
tqdm.pandas(desc='Convert geometries back to shapely')
gadm_level_x.geometry = gadm_level_x.geometry.progress_apply(lambda x: loads(pygeos.to_wkb(x)))
# concat missing country to gadm levels
gadm_level_x = geopandas.GeoDataFrame( pandas.concat( [gadm_level_x,mis_country] ,ignore_index=True) )
gadm_level_x.reset_index(drop=True,inplace=True)
#save to new country file
gadm_level_x.to_file(os.path.join(cleaned_shapes_path,'global_regions.gpkg'),layer='level{}'.format(assigned_level), driver="GPKG")
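# Usage sketch (hedged): the country file must exist before a regionalized run, because the
# regional layer borrows geometries for countries that have no subregions.
#
#   global_shapefiles('./data')                                        # -> cleaned_shapes/global_countries.gpkg
#   global_shapefiles('./data',regionalized=True,assigned_level=1)     # -> cleaned_shapes/global_regions.gpkg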
def remove_tiny_shapes(x,regionalized=False):
"""This function will remove the small shapes of multipolygons. Will reduce the size of the file.
Arguments:
        *x* : a geometry feature (Polygon or MultiPolygon) to simplify. Countries which are very large will see larger (uninhabited) islands being removed.
    Optional Arguments:
        *regionalized* : Default is **False**. Set to **True** to use lower threshold settings.
Returns:
*MultiPolygon* : a shapely geometry MultiPolygon without tiny shapes.
"""
# if its a single polygon, just return the polygon geometry
if pygeos.geometry.get_type_id(x.geometry) == 3: # 'Polygon':
return x.geometry
# if its a multipolygon, we start trying to simplify and remove shapes if its too big.
elif pygeos.geometry.get_type_id(x.geometry) == 6: # 'MultiPolygon':
if regionalized == False:
area1 = 0.1
area2 = 250
elif regionalized == True:
area1 = 0.01
area2 = 50
# dont remove shapes if total area is already very small
if pygeos.area(x.geometry) < area1:
return x.geometry
# remove bigger shapes if country is really big
if x['GID_0'] in ['CHL','IDN']:
threshold = 0.01
        elif x['GID_0'] in ['RUS','GRL','CAN','USA']:
            threshold = 0.01
elif pygeos.area(x.geometry) > area2:
threshold = 0.1
else:
threshold = 0.001
# save remaining polygons as new multipolygon for the specific country
new_geom = []
for index_ in range(pygeos.geometry.get_num_geometries(x.geometry)):
if pygeos.area(pygeos.geometry.get_geometry(x.geometry,index_)) > threshold:
new_geom.append(pygeos.geometry.get_geometry(x.geometry,index_))
return pygeos.creation.multipolygons(numpy.array(new_geom))
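# Small illustration (hedged, not part of the pipeline): remove_tiny_shapes expects a row-like object
# with a pygeos 'geometry' and a 'GID_0' entry, as produced by DataFrame.progress_apply(...,axis=1):
#
#   row = pandas.Series({'GID_0': 'NLD',
#                        'geometry': pygeos.multipolygons([pygeos.box(0,0,5,5), pygeos.box(10,10,10.01,10.01)])})
#   remove_tiny_shapes(row)   # keeps the 5x5 degree box, drops the 0.01x0.01 degree speck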
def poly_files(data_path,global_shape,regionalized=False):
"""
This function will create the .poly files from the world shapefile. These
.poly files are used to extract data from the openstreetmap files.
This function is adapted from the OSMPoly function in QGIS.
Arguments:
*data_path* : base path to location of all files.
*global_shape*: exact path to the global shapefile used to create the poly files.
    Optional Arguments:
        *regionalized* : Default is **False**. Set to **True** will perform the analysis
        on a regional level.
Returns:
*.poly file* for each country in a new dir in the working directory.
"""
# =============================================================================
# """ Create output dir for .poly files if it is doesnt exist yet"""
# =============================================================================
poly_dir = os.path.join(data_path,'country_poly_files')
if regionalized == True:
poly_dir = os.path.join(data_path,'regional_poly_files')
if not os.path.exists(poly_dir):
os.makedirs(poly_dir)
# =============================================================================
# """Load country shapes and country list and only keep the required countries"""
# =============================================================================
wb_poly = geopandas.read_file(global_shape)
    # filter polygon file
    wb_poly = wb_poly.loc[wb_poly['GID_0'] != '-']
wb_poly.crs = {'init' :'epsg:4326'}
# =============================================================================
# """ The important part of this function: create .poly files to clip the country
# data from the openstreetmap file """
# =============================================================================
num = 0
    # iterate over the countries (rows) in the world shapefile
for f in wb_poly.iterrows():
f = f[1]
num = num + 1
geom=f.geometry
# try:
# this will create a list of the different subpolygons
if geom.geom_type == 'MultiPolygon':
polygons = geom
        # the list will be length 1 if it is just one polygon
elif geom.geom_type == 'Polygon':
polygons = [geom]
# define the name of the output file, based on the ISO3 code
ctry = f['GID_0']
if regionalized == True:
attr=f['GID_1']
else:
attr=f['GID_0']
# start writing the .poly file
f = open(poly_dir + "/" + attr +'.poly', 'w')
f.write(attr + "\n")
i = 0
# loop over the different polygons, get their exterior and write the
# coordinates of the ring to the .poly file
for polygon in polygons:
if ctry == 'CAN':
dist = geodesic(reversed(polygon.centroid.coords[:1][0]), (83.24,-79.80), ellipsoid='WGS-84').kilometers
if dist < 2000:
continue
if ctry == 'RUS':
dist = geodesic(reversed(polygon.centroid.coords[:1][0]), (82.26,58.89), ellipsoid='WGS-84').kilometers
if dist < 500:
continue
polygon = numpy.array(polygon.exterior)
j = 0
f.write(str(i) + "\n")
for ring in polygon:
j = j + 1
f.write(" " + str(ring[0]) + " " + str(ring[1]) +"\n")
i = i + 1
# close the ring of one subpolygon if done
f.write("END" +"\n")
# close the file when done
f.write("END" +"\n")
f.close()
# except:
# print(f['GID_1'])
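# Usage sketch (hedged): generate one .poly clipping boundary per country from the cleaned file.
#
#   poly_files('./data','./data/cleaned_shapes/global_countries.gpkg')
#
# writes '<GID_0>.poly' files into './data/country_poly_files'.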
def clip_osm_osmconvert(data_path,planet_path,area_poly,area_pbf):
""" Clip the an area osm file from the larger continent (or planet) file and save to a new osm.pbf file.
This is much faster compared to clipping the osm.pbf file while extracting through ogr2ogr.
This function uses the osmconvert tool, which can be found at http://wiki.openstreetmap.org/wiki/Osmconvert.
Either add the directory where this executable is located to your environmental variables or just put it in the 'scripts' directory.
Arguments:
        *data_path*: base path to the location of all files.
        *planet_path*: path string to the osm.pbf file of the continent or planet associated with the country.
        *area_poly*: path string to the .poly file, made through the 'poly_files' function.
*area_pbf*: path string indicating the final output dir and output name of the new .osm.pbf file.
Returns:
a clipped .osm.pbf file.
"""
print('{} started!'.format(area_pbf))
osm_convert_path = os.path.join('osmconvert64-0.8.8p')
try:
if (os.path.exists(area_pbf) is not True):
os.system('{} {} -B={} --complete-ways -o={}'.format(osm_convert_path,planet_path,area_poly,area_pbf))
print('{} finished!'.format(area_pbf))
except:
print('{} did not finish!'.format(area_pbf))
def clip_osm_osmosis(planet_path,area_poly,area_pbf):
""" Clip the an area osm file from the larger continent (or planet) file and save to a new osm.pbf file.
This is much faster compared to clipping the osm.pbf file while extracting through ogr2ogr.
This function uses the osmconvert tool, which can be found at http://wiki.openstreetmap.org/wiki/Osmconvert.
Either add the directory where this executable is located to your environmental variables or just put it in the 'scripts' directory.
Arguments:
*planet_path*: path string to the planet-latest.osm.pbf file
        *area_poly*: path string to the .poly file, made through the 'poly_files' function.
*area_pbf*: path string indicating the final output dir and output name of the new .osm.pbf file.
Returns:
a clipped .osm.pbf file.
"""
print('{} started!'.format(area_pbf))
#osmosis_convert_path = os.path.join("..","osmosis","bin","osmosis.bat")
osmosis_convert_path = os.path.join("osmosis")
try:
if (os.path.exists(area_pbf) is not True):
os.system('{} --read-pbf file="{}" --bounding-polygon file="{}" --write-pbf file="{}"'.format(osmosis_convert_path,planet_path,area_poly,area_pbf))
print('{} finished!'.format(area_pbf))
except:
print('{} did not finish!'.format(area_pbf))
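# Usage sketch (hedged): both clippers shell out to an external executable, so the osmconvert binary
# (assumed here to be named 'osmconvert64-0.8.8p') or the osmosis launcher must be on the PATH or
# sit next to this script.
#
#   clip_osm_osmconvert('./data','./data/planet_osm/planet-latest.osm.pbf',
#                       './data/country_poly_files/NLD.poly','./data/country_osm/NLD.osm.pbf')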
def single_country(country,data_path,regionalized=False,create_poly_files=False,osm_convert=True,geofabrik=False):
"""
Obtain a country osm.pbf file. This can be done by extracting it from the planet file or by downloading it from GeoFabrik.
This function has the option to extract individual regions.
Arguments:
        *country* : The country for which we want to extract the data.
Keyword Arguments:
*regionalized* : Default is **False**. Set to **True** will parallelize the extraction over all regions within a country.
*create_poly_files* : Default is **False**. Set to **True** will create new .poly files.
*osm_convert* : Default is **True**. Set to **False** will use osmosis.
*geofabrik* : Default is **False**. Set to **True** will try to obtain country file from geofabrik.
"""
if geofabrik==False:
# path to planet file
planet_path = os.path.join(data_path,'planet_osm','planet-latest.osm.pbf')
# global shapefile path
if regionalized == True:
world_path = os.path.join(data_path,'input_data','global_regions.gpkg')
else:
world_path = os.path.join(data_path,'input_data','global_countries.gpkg')
if not os.path.exists(os.path.join(data_path,'country_poly_files')):
os.makedirs(os.path.join(data_path,'country_poly_files'))
else:
world_path = os.path.join(data_path,'input_data','global_regions.gpkg')
    # create poly files for all countries
    if create_poly_files == True:
        poly_files(data_path,world_path,regionalized=regionalized)
if not os.path.exists(os.path.join(data_path,'country_osm')):
os.makedirs(os.path.join(data_path,'country_osm'))
ctry_poly = os.path.join(data_path,'country_poly_files','{}.poly'.format(country))
ctry_pbf = os.path.join(data_path,'country_osm','{}.osm.pbf'.format(country))
if (regionalized == False) & (geofabrik==True):
country_osm(country,data_path)
elif (regionalized == False) & (geofabrik==False):
if osm_convert == True:
try:
clip_osm_osmconvert(data_path,planet_path,ctry_poly,ctry_pbf)
except:
print("NOTE: osmconvert is not correctly installed. Please check your environmental variables settings.")
else:
try:
                clip_osm_osmosis(planet_path,ctry_poly,ctry_pbf)
except:
print("NOTE: osmosis is not correctly installed. Please check your environmental variables settings.")
elif regionalized == True:
country_pbf_exists = os.path.exists(ctry_pbf)
if (country_pbf_exists==False) & (geofabrik == True):
country_osm(country,data_path)
elif (country_pbf_exists==False) & (osm_convert == True):
try:
clip_osm_osmconvert(data_path,planet_path,ctry_poly,ctry_pbf)
except:
print("NOTE: osmconvert is not correctly installed. Please check your environmental variables settings.")
        elif (country_pbf_exists==False) & (osm_convert == False):
            try:
                clip_osm_osmosis(planet_path,ctry_poly,ctry_pbf)
except:
print("NOTE: osmosis is not correctly installed. Please check your environmental variables settings.")
else:
print('Country file is already downloaded')
if not os.path.exists(os.path.join(data_path,'regional_poly_files')):
os.makedirs(os.path.join(data_path,'regional_poly_files'))
if not os.path.exists(os.path.join(data_path,'region_osm_admin1')):
os.makedirs(os.path.join(data_path,'region_osm_admin1'))
get_poly_files = [x for x in os.listdir(os.path.join(data_path,'regional_poly_files')) if x.startswith(country)]
polyPaths = [os.path.join(data_path,'regional_poly_files',x) for x in get_poly_files]
area_pbfs = [os.path.join(data_path,'region_osm_admin1',x.split('.')[0]+'_'+x.split('.')[1]+'.osm.pbf') for x in get_poly_files]
data_paths = [data_path]*len(polyPaths)
planet_paths = [ctry_pbf]*len(polyPaths)
# and run all regions parallel to each other
pool = Pool(cpu_count()-1)
if osm_convert == True:
pool.starmap(clip_osm_osmconvert, zip(data_paths,planet_paths,polyPaths,area_pbfs),chunksize=1)
else:
            pool.starmap(clip_osm_osmosis, zip(planet_paths,polyPaths,area_pbfs),chunksize=1)
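# Usage sketch (hedged; 'NLD' and './data' are example values):
#
#   single_country('NLD','./data',geofabrik=True)                      # direct GeoFabrik download
#   single_country('NLD','./data',create_poly_files=True)              # clip from the planet file with osmconvert
#   single_country('NLD','./data',regionalized=True,osm_convert=False) # clip all NLD regions in parallel with osmosis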
def all_countries(subset = [], regionalized=False,reversed_order=False,osm_convert=True):
"""
Clip all countries from the planet osm file and save them to individual osm.pbf files
Optional Arguments:
        *subset* : allow for a pre-defined subset of countries. Requires ISO3 codes. Will run all countries if left empty.
*regionalized* : Default is **False**. Set to **True** if you want to have the regions of a country as well.
*reversed_order* : Default is **False**. Set to **True** to work backwards for a second process of the same country set to prevent overlapping calculations.
Returns:
clipped osm.pbf files for the defined set of countries (either the whole world by default or the specified subset)
"""
# set data path
data_path = os.path.join('..','data')
# path to planet file
planet_path = os.path.join(data_path,'planet_osm','planet-latest.osm.pbf')
# global shapefile path
if regionalized == True:
world_path = os.path.join(data_path,'cleaned_shapes','global_regions.gpkg')
else:
world_path = os.path.join(data_path,'cleaned_shapes','global_countries.gpkg')
# create poly files for all countries
poly_files(data_path,world_path,regionalized=regionalized)
# prepare lists for multiprocessing
if not os.path.exists(os.path.join(data_path,'country_poly_files')):
os.makedirs(os.path.join(data_path,'country_poly_files'))
if not os.path.exists(os.path.join(data_path,'country_osm')):
os.makedirs(os.path.join(data_path,'country_osm'))
if regionalized == False:
get_poly_files = os.listdir(os.path.join(data_path,'country_poly_files'))
if len(subset) > 0:
polyPaths = [os.path.join(data_path,'country_poly_files',x) for x in get_poly_files if x[:3] in subset]
area_pbfs = [os.path.join(data_path,'country_osm',x.split('.')[0]+'.osm.pbf') for x in get_poly_files if x[:3] in subset]
else:
polyPaths = [os.path.join(data_path,'country_poly_files',x) for x in get_poly_files]
area_pbfs = [os.path.join(data_path,'country_osm',x.split('.')[0]+'.osm.pbf') for x in get_poly_files]
big_osm_paths = [planet_path]*len(polyPaths)
elif regionalized == True:
if not os.path.exists(os.path.join(data_path,'regional_poly_files')):
os.makedirs(os.path.join(data_path,'regional_poly_files'))
        if not os.path.exists(os.path.join(data_path,'region_osm_admin1')):
            os.makedirs(os.path.join(data_path,'region_osm_admin1'))
get_poly_files = os.listdir(os.path.join(data_path,'regional_poly_files'))
if len(subset) > 0:
polyPaths = [os.path.join(data_path,'regional_poly_files',x) for x in get_poly_files if x[:3] in subset]
            area_pbfs = [os.path.join(data_path,'region_osm_admin1',x.split('.')[0]+'_'+x.split('.')[1]+'.osm.pbf') for x in get_poly_files if x[:3] in subset]
big_osm_paths = [os.path.join(data_path,'country_osm',x[:3]+'.osm.pbf') for x in get_poly_files if x[:3] in subset]
else:
polyPaths = [os.path.join(data_path,'regional_poly_files',x) for x in get_poly_files]
            area_pbfs = [os.path.join(data_path,'region_osm_admin1',x.split('.')[0]+'_'+x.split('.')[1]+'.osm.pbf') for x in get_poly_files]
big_osm_paths = [os.path.join(data_path,'country_osm',x[:3]+'.osm.pbf') for x in get_poly_files]
data_paths = [data_path]*len(polyPaths)
# allow for reversed order if you want to run two at the same time (convenient to work backwards for the second process, to prevent overlapping calculation)
if reversed_order == True:
polyPaths = polyPaths[::-1]
area_pbfs = area_pbfs[::-1]
big_osm_paths = big_osm_paths[::-1]
# extract all country osm files through multiprocesing
pool = Pool(cpu_count()-1)
if osm_convert==True:
pool.starmap(clip_osm_osmconvert, zip(data_paths,big_osm_paths,polyPaths,area_pbfs),chunksize=1)
else:
        pool.starmap(clip_osm_osmosis, zip(big_osm_paths,polyPaths,area_pbfs),chunksize=1)
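# Usage sketch (hedged): all_countries() hard-codes its base path to '../data', so it is meant to be
# run from a scripts-style subfolder. A regionalized run assumes the per-country .osm.pbf files were
# extracted first.
#
#   all_countries(subset=['NLD','BEL'],regionalized=True)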
def country_list():
"""
    Dictionary to match an ISO-3 country code with the (region, name) strings required to download that country from GeoFabrik.
"""
return {
'AFG' : ('asia','afghanistan'),
'ALB' : ('europe','albania'),
'DZA' : ('africa','algeria'),
'AND' : ('europe','andorra'),
'AGO' : ('africa','angola'),
'BEN' : ('africa', 'benin'),
'BWA' : ('africa', 'botswana'),
'BFA' : ('africa', 'burkina-faso'),
'BDI' : ('africa', 'burundi'),
'CMR' : ('africa', 'cameroon'),
        # 'canary-islands' is also available on GeoFabrik under 'africa', but it has no ISO-3 code of its own.
'CPV' : ('africa', 'cape-verde'),
'CAF' : ('africa', 'central-african-republic'),
'TCD' : ('africa', 'chad'),
'COM' : ('africa', 'comores'),
'COG' : ('africa', 'congo-brazzaville'),
'COD' : ('africa', 'congo-democratic-republic'),
'DJI' : ('africa', 'djibouti'),
'EGY' : ('africa', 'egypt'),
'GNQ' : ('africa', 'equatorial-guinea'),
'ERI' : ('africa', 'eritrea'),
'ETH' : ('africa', 'ethiopia'),
'GAB' : ('africa', 'gabon'),
'GMB' : ('africa', 'senegal-and-gambia'), #TOGETHER WITH SENEGAL
'GHA' : ('africa', 'ghana'),
'GIN' : ('africa', 'guinea'),
'GNB' : ('africa', 'guinea-bissau'),
'CIV' : ('africa', 'ivory-coast'),
'KEN' : ('africa', 'kenya'),
'LSO' : ('africa', 'lesotho'),
'LBR' : ('africa', 'liberia'),
'LBY' : ('africa', 'libya'),
'MDG' : ('africa', 'madagascar'),
'MWI' : ('africa', 'malawi'),
'MLI' : ('africa', 'mali'),
'MRT' : ('africa', 'mauritania'),
'MAR' : ('africa', 'morocco'),
'MOZ' : ('africa', 'mozambique'),
'NAM' : ('africa', 'namibia'),
'NER' : ('africa', 'niger'),
'NGA' : ('africa', 'nigeria'),
'RWA' : ('africa', 'rwanda'),
'SHN' : ('africa', 'saint-helena-ascension-and-tristan-da-cunha'),
'STP' : ('africa', 'sao-tome-and-principe'),
'SEN' : ('africa', 'senegal-and-gambia'), #TOGETHER WITH THE GAMBIA
'SYC' : ('africa', 'seychelles'),
'SLE' : ('africa', 'sierra-leone'),
'SOM' : ('africa', 'somalia'),
'ZAF' : ('africa', 'south-africa'),
'SDN' : ('africa', 'sudan'),
'SSD' : ('africa', 'south-sudan'),
'SWZ' : ('africa', 'swaziland'),
'TZA' : ('africa', 'tanzania'),
'TGO' : ('africa', 'togo'),
'TUN' : ('africa', 'tunisia'),
'UGA' : ('africa', 'uganda'),
'ZMB' : ('africa', 'zambia'),
'ZWE' : ('africa', 'zimbabwe'),
'ARM' : ('asia', 'armenia'),
'AZE' : ('asia', 'azerbaijan'),
'BGD' : ('asia', 'bangladesh'),
'BTN' : ('asia', 'bhutan'),
'KHM' : ('asia', 'cambodia'),
'CHN' : ('asia', 'china'),
'SAU' : ('asia', 'gcc-states'), #Together with Kuwait, the United Arab Emirates, Qatar, Bahrain, and Oman
'KWT' : ('asia', 'gcc-states'), #Together with Saudi Arabia, the United Arab Emirates, Qatar, Bahrain, and Oman
'ARE' : ('asia', 'gcc-states'), #Together with Saudi Arabia, Kuwait, Qatar, Bahrain, and Oman
'QAT' : ('asia', 'gcc-states'), #Together with Saudi Arabia, Kuwait, the United Arab Emirates, Bahrain, and Oman
        'OMN' : ('asia', 'gcc-states'), #Together with Saudi Arabia, Kuwait, the United Arab Emirates, Qatar and Bahrain
        'BHR' : ('asia', 'gcc-states'), #Together with Saudi Arabia, Kuwait, the United Arab Emirates, Qatar and Oman
'IND' : ('asia', 'india'),
'IDN' : ('asia', 'indonesia'),
'IRN' : ('asia', 'iran'),
'IRQ' : ('asia', 'iraq'),
'ISR' : ('asia', 'israel-and-palestine'), # TOGETHER WITH PALESTINE
'PSE' : ('asia', 'israel-and-palestine'), # TOGETHER WITH ISRAEL
'JPN' : ('asia', 'japan'),
'JOR' : ('asia', 'jordan'),
'KAZ' : ('asia', 'kazakhstan'),
'KGZ' : ('asia', 'kyrgyzstan'),
'LAO' : ('asia', 'laos'),
'LBN' : ('asia', 'lebanon'),
'MYS' : ('asia', 'malaysia-singapore-brunei'), # TOGETHER WITH SINGAPORE AND BRUNEI
'SGP' : ('asia', 'malaysia-singapore-brunei'), # TOGETHER WITH MALAYSIA AND BRUNEI
'BRN' : ('asia', 'malaysia-singapore-brunei'), # TOGETHER WITH MALAYSIA AND SINGAPORE
'MDV' : ('asia', 'maldives'),
'MNG' : ('asia', 'mongolia'),
'MMR' : ('asia', 'myanmar'),
'NPL' : ('asia', 'nepal'),
'PRK' : ('asia', 'north-korea'),
'PAK' : ('asia', 'pakistan'),
'PHL' : ('asia', 'philippines'),
'KOR' : ('asia', 'south-korea'),
'LKA' : ('asia', 'sri-lanka'),
'SYR' : ('asia', 'syria'),
'TWN' : ('asia', 'taiwan'),
'TJK' : ('asia', 'tajikistan'),
'THA' : ('asia', 'thailand'),
'TKM' : ('asia', 'turkmenistan'),
'UZB' : ('asia', 'uzbekistan'),
'VNM' : ('asia', 'vietnam'),
'YEM' : ('asia', 'yemen'),
'BHS' : ('central-america', 'bahamas'),
'BLZ' : ('central-america', 'belize'),
'CUB' : ('central-america', 'cuba'),
'GTM' : ('central-america', 'guatemala'),
'HTI' : ('central-america', 'haiti-and-domrep'), # TOGETHER WITH DOMINICAN REPUBLIC
'DOM' : ('central-america', 'haiti-and-domrep'), # TOGETHER WITH HAITI
'JAM' : ('central-america', 'jamaica'),
'NIC' : ('central-america', 'nicaragua'),
'AUT' : ('europe', 'austria'),
'BLR' : ('europe', 'belarus'),
'BEL' : ('europe', 'belgium'),
'BIH' : ('europe', 'bosnia-herzegovina'),
'BGR' : ('europe', 'bulgaria'),
'HRV' : ('europe', 'croatia'),
'CYP' : ('europe', 'cyprus'),
'CZE' : ('europe', 'czech-republic'),
'DNK' : ('europe', 'denmark'),
'EST' : ('europe', 'estonia'),
'FRO' : ('europe', 'faroe-islands'),
'FIN' : ('europe', 'finland'),
'FRA' : ('europe', 'france'),
'GEO' : ('europe', 'georgia'),
'DEU' : ('europe', 'germany'),
        'GBR' : ('europe', 'great-britain'), # DOES NOT INCLUDE NORTHERN IRELAND
'GRC' : ('europe', 'greece'),
'HUN' : ('europe', 'hungary'),
'ISL' : ('europe', 'iceland'),
'IRL' : ('europe', 'ireland-and-northern-ireland'),
'IMN' : ('europe', 'isle-of-man'),
'ITA' : ('europe', 'italy'),
'LVA' : ('europe', 'latvia'),
'LIE' : ('europe', 'liechtenstein'),
'LTU' : ('europe', 'lithuania'),
'LUX' : ('europe', 'luxembourg'),
'MKD' : ('europe', 'macedonia'),
'MLT' : ('europe', 'malta'),
'MDA' : ('europe', 'moldova'),
'MCO' : ('europe', 'monaco'),
'MNE' : ('europe', 'montenegro'),
'NLD' : ('europe', 'netherlands'),
'NOR' : ('europe', 'norway'),
'POL' : ('europe', 'poland'),
'PRT' : ('europe', 'portugal'),
'ROU' : ('europe', 'romania'),
'RUS' : ('europe', 'russia'),
'SRB' : ('europe', 'serbia'),
'SVK' : ('europe', 'slovakia'),
'SVN' : ('europe', 'slovenia'),
'ESP' : ('europe', 'spain'),
'SWE' : ('europe', 'sweden'),
'CHE' : ('europe', 'switzerland'),
'TUR' : ('europe', 'turkey'),
'UKR' : ('europe', 'ukraine'),
'CAN' : ('north-america', 'canada'),
'GRL' : ('north-america', 'greenland'),
'MEX' : ('north-america', 'mexico'),
'USA' : ('north-america', 'us'),
'AUS' : ('australia-oceania', 'australia'),
'COK' : ('australia-oceania', 'cook-islands'),
'FJI' : ('australia-oceania', 'fiji'),
'KIR' : ('australia-oceania', 'kiribati'),
'MHL' : ('australia-oceania', 'marshall-islands'),
'FSM' : ('australia-oceania', 'micronesia'),
'NRU' : ('australia-oceania', 'nauru'),
'NCL' : ('australia-oceania', 'new-caledonia'),
'NZL' : ('australia-oceania', 'new-zealand'),
'NIU' : ('australia-oceania', 'niue'),
'PLW' : ('australia-oceania', 'palau'),
'PNG' : ('australia-oceania', 'papua-new-guinea'),
'WSM' : ('australia-oceania', 'samoa'),
'SLB' : ('australia-oceania', 'solomon-islands'),
'TON' : ('australia-oceania', 'tonga'),
'TUV' : ('australia-oceania', 'tuvalu'),
'VUT' : ('australia-oceania', 'vanuatu'),
'ARG' : ('south-america', 'argentina'),
'BOL' : ('south-america', 'bolivia'),
'BRA' : ('south-america', 'brazil'),
'CHL' : ('south-america', 'chile'),
'COL' : ('south-america', 'colombia'),
'ECU' : ('south-america', 'ecuador'),
'PRY' : ('south-america', 'paraguay'),
'PER' : ('south-america', 'peru'),
'SUR' : ('south-america', 'suriname'),
'URY' : ('south-america', 'uruguay'),
'VEN' : ('south-america', 'venezuela'),
}
# =============================================================================
# profiler.py
# =============================================================================
import os
import argparse
import json
import onnx
import psutil
import numpy
"""
This profiling tool runs a transformer model and prints out the kernel time spent on each node of the model.
Example of profiling a longformer model:
python profiler.py --model longformer-base-4096_fp32.onnx --batch_size 1 --sequence_length 4096 --global_length 8 --samples 1000 --thread_num 8 --dummy_inputs longformer --use_gpu
"""
def parse_arguments(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--model', required=True, type=str, help="onnx model path")
parser.add_argument('-b', '--batch_size', required=False, type=int, default=1, help="batch size of input")
parser.add_argument('-s',
'--sequence_length',
required=False,
type=int,
default=32,
help="sequence length of input")
parser.add_argument('--past_sequence_length',
required=False,
type=int,
default=1,
help="past sequence length for gpt2")
parser.add_argument('--global_length',
required=False,
type=int,
default=1,
help="number of global tokens for longformer")
parser.add_argument(
'--samples',
required=False,
type=int,
default=1000,
help="number of samples to test. Set it large enough to reduce the variance of performance result.")
parser.add_argument(
'--threshold',
required=False,
type=float,
default=0,
help=
"Threshold of ratio of run time of a node among all nodes. Nodes that nodes with lower ratio will not be in detail results."
)
parser.add_argument("--thread_num", required=False, type=int, default=-1, help="number of threads to use")
parser.add_argument('--input_ids_name',
required=False,
type=str,
default=None,
help="input name for input ids, for bert")
parser.add_argument('--segment_ids_name',
required=False,
type=str,
default=None,
help="input name for segment ids, for bert")
parser.add_argument('--input_mask_name',
required=False,
type=str,
default=None,
help="input name for attention mask, for bert")
parser.add_argument('--dummy_inputs',
required=False,
default='default',
choices=['bert', 'gpt2', 'longformer', 'default'],
help="Way to create dummy inputs. If your model is not aa")
parser.add_argument('-g', '--use_gpu', required=False, action='store_true', help="use GPU")
parser.set_defaults(use_gpu=False)
parser.add_argument(
'--basic_optimization',
required=False,
action='store_true',
help="Enable only basic graph optimizations. By default, all optimizations are enabled in OnnxRuntime")
parser.set_defaults(basic_optimization=False)
parser.add_argument('--kernel_time_only',
required=False,
action='store_true',
help="Only include the kernel time and no fence time")
parser.set_defaults(kernel_time_only=False)
parser.add_argument('-v', '--verbose', required=False, action='store_true')
parser.set_defaults(verbose=False)
args = parser.parse_args(argv)
return args
def create_bert_inputs(model, batch_size, sequence_length, samples, input_ids_name, segment_ids_name, input_mask_name):
from bert_test_data import get_bert_inputs, generate_test_data
input_ids, segment_ids, input_mask = get_bert_inputs(model, input_ids_name, segment_ids_name, input_mask_name)
all_inputs = generate_test_data(batch_size,
sequence_length,
test_cases=samples,
seed=123,
verbose=False,
input_ids=input_ids,
segment_ids=segment_ids,
input_mask=input_mask,
random_mask_length=False)
return all_inputs
def run_profile(onnx_model_path, use_gpu, basic_optimization, thread_num, batch_size, sequence_length, all_inputs):
from benchmark_helper import create_onnxruntime_session
session = create_onnxruntime_session(onnx_model_path,
use_gpu,
enable_all_optimization=not basic_optimization,
num_threads=thread_num,
enable_profiling=True)
for inputs in all_inputs:
_ = session.run(None, inputs)
profile_file = session.end_profiling()
return profile_file
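# Minimal sketch (hedged) of driving the profiler from Python instead of the command line;
# 'model.onnx' is a placeholder path and create_dummy_inputs is defined further below.
#
#   inputs = create_dummy_inputs('model.onnx', batch_size=1, sequence_length=32, samples=10)
#   profile_file = run_profile('model.onnx', use_gpu=False, basic_optimization=False,
#                              thread_num=-1, batch_size=1, sequence_length=32, all_inputs=inputs)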
def load_profile_json(profile_file):
print(f"loading profile output {profile_file} ...")
with open(profile_file, "r") as f:
sess_time = json.load(f)
assert isinstance(sess_time, list)
return sess_time
def parse_profile_results(sess_time, kernel_time_only=False, threshold=0):
node_time = {}
node_provider = {}
total = 0
for item in sess_time:
if item["cat"] == "Node" and "dur" in item and "args" in item and "op_name" in item["args"]:
if "provider" in item["args"]:
device = "CPU" if item["args"]["provider"] == "CPUExecutionProvider" else "CUDA"
if item["name"] not in node_provider:
node_provider[item["name"]] = device
else:
assert node_provider[item["name"]] == device
elif kernel_time_only:
continue
if item["name"] in node_time:
node_time[item["name"]] += item["dur"]
else:
node_time[item["name"]] = item["dur"]
total += item["dur"]
results = []
if (threshold > 0):
results.append(f"Threshold of Percentage > {threshold:.2f}%")
results.append(f"Duration\tPercentage\tProvider\tName")
for k, v in sorted(node_time.items(), key=lambda x: x[1], reverse=True):
provider = node_provider[k] if k in node_provider else ""
ratio = v / total
if ratio > threshold:
results.append(f"{v}\t{ratio * 100.0:5.2f}\t{provider}\t{k}")
return results
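# Hedged sketch of the kind of onnxruntime profile record this parser consumes (the exact field
# layout can differ between onnxruntime versions):
#
#   {"cat": "Node", "name": "attention_1_kernel_time", "dur": 123,
#    "args": {"op_name": "Attention", "provider": "CUDAExecutionProvider"}}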
def group_profile_results(sess_time, kernel_time_only=False, threshold=0):
op_time = {}
op_records = {}
op_cpu_time = {}
op_cpu_records = {}
total = 0
for item in sess_time:
if item["cat"] == "Node" and "dur" in item and "args" in item and "op_name" in item["args"]:
if kernel_time_only and "provider" not in item["args"]:
continue
op_name = item["args"]["op_name"]
if op_name in op_time:
op_time[op_name] += item["dur"]
op_records[op_name] += 1
else:
op_time[op_name] = item["dur"]
op_records[op_name] = 1
total += item["dur"]
is_cpu = "provider" in item["args"] and item["args"]["provider"] == "CPUExecutionProvider"
if is_cpu:
if op_name in op_cpu_time:
op_cpu_time[op_name] += item["dur"]
op_cpu_records[op_name] += 1
else:
op_cpu_time[op_name] = item["dur"]
op_cpu_records[op_name] = 1
results = [f"Duration\tPercentage\tCalls\tCpu_Duration\tCpu_Calls\tName"]
for k, v in sorted(op_time.items(), key=lambda x: x[1], reverse=True):
calls = op_records[k]
cpu_time = op_cpu_time[k] if k in op_cpu_time else 0
cpu_calls = op_cpu_records[k] if k in op_cpu_records else 0
ratio = v / total
if ratio > threshold:
results.append(f"{v}\t{ratio * 100.0:5.2f}\t{calls}\t{cpu_time}\t{cpu_calls}\t{k}")
return results
def get_dim_from_type_proto(dim):
return getattr(dim, dim.WhichOneof('value')) if type(dim.WhichOneof('value')) == str else None
def get_shape_from_type_proto(type_proto):
return [get_dim_from_type_proto(d) for d in type_proto.tensor_type.shape.dim]
def create_dummy_inputs(onnx_model_path, batch_size, sequence_length, samples):
from onnx import TensorProto
from onnx_model import OnnxModel
onnx_model = OnnxModel(onnx.load(onnx_model_path))
dummy_inputs = {}
for input in onnx_model.get_graph_inputs_excluding_initializers():
shape = get_shape_from_type_proto(input.type)
symbol_dims = []
for i, dim in enumerate(shape):
if type(dim) == str:
symbol_dims.append(i)
# allowed symbolic dimensions: batch_size and sequence_length
if len(symbol_dims) > 2:
return None
if len(symbol_dims) > 0:
shape[symbol_dims[0]] = batch_size
if len(symbol_dims) > 1:
shape[symbol_dims[1]] = sequence_length
elem_type = input.type.tensor_type.elem_type
assert elem_type in [TensorProto.FLOAT, TensorProto.INT32, TensorProto.INT64]
data_type = numpy.float32 if elem_type == TensorProto.FLOAT else (
numpy.int64 if elem_type == TensorProto.INT64 else numpy.int32)
data = numpy.ones(shape, dtype=data_type)
dummy_inputs[input.name] = data
all_inputs = [dummy_inputs for _ in range(samples)]
return all_inputs
def create_gpt2_inputs(onnx_model_path, batch_size, sequence_length, past_sequence_length, samples):
from onnx import TensorProto
from onnx_model import OnnxModel
onnx_model = OnnxModel(onnx.load(onnx_model_path))
# The symbolic name shall be same as those used in Gpt2Helper.export_onnx(...) function.
symbols = {
'batch_size': batch_size,
'seq_len': sequence_length,
'past_seq_len': past_sequence_length,
'total_seq_len': sequence_length + past_sequence_length
}
dummy_inputs = {}
for input in onnx_model.get_graph_inputs_excluding_initializers():
shape = get_shape_from_type_proto(input.type)
        for i, dim in enumerate(shape):
            if type(dim) == str:
                if dim not in symbols.keys():
                    raise RuntimeError(f"symbol is not supported: {dim}")
                shape[i] = symbols[dim]
elem_type = input.type.tensor_type.elem_type
assert elem_type in [TensorProto.FLOAT, TensorProto.INT32, TensorProto.INT64]
data_type = numpy.float32 if elem_type == TensorProto.FLOAT else (
numpy.int64 if elem_type == TensorProto.INT64 else numpy.int32)
data = numpy.ones(shape, dtype=data_type)
dummy_inputs[input.name] = data
all_inputs = [dummy_inputs for _ in range(samples)]
return all_inputs
def create_longformer_inputs(onnx_model_path, batch_size, sequence_length, global_length, samples):
from onnx import TensorProto
from onnx_model import OnnxModel
onnx_model = OnnxModel(onnx.load(onnx_model_path))
symbols = {'batch_size': batch_size, 'sequence_length': sequence_length}
dummy_inputs = {}
for input in onnx_model.get_graph_inputs_excluding_initializers():
shape = get_shape_from_type_proto(input.type)
        for i, dim in enumerate(shape):
            if type(dim) == str:
                if dim not in symbols.keys():
                    raise RuntimeError(f"symbol is not supported: {dim}")
                shape[i] = symbols[dim]
elem_type = input.type.tensor_type.elem_type
assert elem_type in [TensorProto.FLOAT, TensorProto.INT32, TensorProto.INT64]
data_type = numpy.float32 if elem_type == TensorProto.FLOAT else (
numpy.int64 if elem_type == TensorProto.INT64 else numpy.int32)
if "global" in input.name:
data = numpy.zeros(shape, dtype=data_type)
data[:, :global_length] = 1
else:
data = numpy.ones(shape, dtype=data_type)
dummy_inputs[input.name] = data
all_inputs = [dummy_inputs for _ in range(samples)]
return all_inputs
def run(args):
num_threads = args.thread_num if args.thread_num > 0 else psutil.cpu_count(logical=False)
# Set OMP environment variable before importing onnxruntime. Needed for cpu only, and no impact for onnxruntime-gpu package.
if "OMP_NUM_THREADS" not in os.environ:
os.environ["OMP_NUM_THREADS"] = str(num_threads)
all_inputs = None
if args.dummy_inputs == 'bert':
all_inputs = create_bert_inputs(args.model, args.batch_size, args.sequence_length, args.samples,
args.input_ids_name, args.segment_ids_name, args.input_mask_name)
elif args.dummy_inputs == 'gpt2':
all_inputs = create_gpt2_inputs(args.model, args.batch_size, args.sequence_length, args.past_sequence_length,
args.samples)
elif args.dummy_inputs == 'longformer':
all_inputs = create_longformer_inputs(args.model, args.batch_size, args.sequence_length, args.global_length,
args.samples)
else: # default
all_inputs = create_dummy_inputs(args.model, args.batch_size, args.sequence_length, args.samples)
profile_file = run_profile(args.model, args.use_gpu, args.basic_optimization, args.thread_num, args.batch_size,
args.sequence_length, all_inputs)
profile_records = load_profile_json(profile_file)
lines = parse_profile_results(profile_records, args.kernel_time_only, args.threshold)
lines.append("-" * 64)
lines += group_profile_results(profile_records, args.kernel_time_only, args.threshold)
return lines
if __name__ == '__main__':
args = parse_arguments()
print("Arguments", args)
from benchmark_helper import setup_logger
setup_logger(args.verbose)
results = run(args)
print("Results:")
print("-" * 64)
for line in results:
print(line)
|
[
"json.load",
"argparse.ArgumentParser",
"psutil.cpu_count",
"numpy.zeros",
"numpy.ones",
"benchmark_helper.setup_logger",
"bert_test_data.generate_test_data",
"bert_test_data.get_bert_inputs",
"onnx.load",
"benchmark_helper.create_onnxruntime_session"
] |
[((470, 495), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (493, 495), False, 'import argparse\n'), ((4019, 4092), 'bert_test_data.get_bert_inputs', 'get_bert_inputs', (['model', 'input_ids_name', 'segment_ids_name', 'input_mask_name'], {}), '(model, input_ids_name, segment_ids_name, input_mask_name)\n', (4034, 4092), False, 'from bert_test_data import get_bert_inputs, generate_test_data\n'), ((4110, 4306), 'bert_test_data.generate_test_data', 'generate_test_data', (['batch_size', 'sequence_length'], {'test_cases': 'samples', 'seed': '(123)', 'verbose': '(False)', 'input_ids': 'input_ids', 'segment_ids': 'segment_ids', 'input_mask': 'input_mask', 'random_mask_length': '(False)'}), '(batch_size, sequence_length, test_cases=samples, seed=\n 123, verbose=False, input_ids=input_ids, segment_ids=segment_ids,\n input_mask=input_mask, random_mask_length=False)\n', (4128, 4306), False, 'from bert_test_data import get_bert_inputs, generate_test_data\n'), ((4802, 4957), 'benchmark_helper.create_onnxruntime_session', 'create_onnxruntime_session', (['onnx_model_path', 'use_gpu'], {'enable_all_optimization': '(not basic_optimization)', 'num_threads': 'thread_num', 'enable_profiling': '(True)'}), '(onnx_model_path, use_gpu,\n enable_all_optimization=not basic_optimization, num_threads=thread_num,\n enable_profiling=True)\n', (4828, 4957), False, 'from benchmark_helper import create_onnxruntime_session\n'), ((14331, 14357), 'benchmark_helper.setup_logger', 'setup_logger', (['args.verbose'], {}), '(args.verbose)\n', (14343, 14357), False, 'from benchmark_helper import setup_logger\n'), ((5406, 5418), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5415, 5418), False, 'import json\n'), ((8838, 8864), 'onnx.load', 'onnx.load', (['onnx_model_path'], {}), '(onnx_model_path)\n', (8847, 8864), False, 'import onnx\n'), ((9748, 9782), 'numpy.ones', 'numpy.ones', (['shape'], {'dtype': 'data_type'}), '(shape, dtype=data_type)\n', (9758, 9782), False, 'import numpy\n'), ((10103, 10129), 'onnx.load', 'onnx.load', (['onnx_model_path'], {}), '(onnx_model_path)\n', (10112, 10129), False, 'import onnx\n'), ((11111, 11145), 'numpy.ones', 'numpy.ones', (['shape'], {'dtype': 'data_type'}), '(shape, dtype=data_type)\n', (11121, 11145), False, 'import numpy\n'), ((11465, 11491), 'onnx.load', 'onnx.load', (['onnx_model_path'], {}), '(onnx_model_path)\n', (11474, 11491), False, 'import onnx\n'), ((12637, 12668), 'psutil.cpu_count', 'psutil.cpu_count', ([], {'logical': '(False)'}), '(logical=False)\n', (12653, 12668), False, 'import psutil\n'), ((12295, 12330), 'numpy.zeros', 'numpy.zeros', (['shape'], {'dtype': 'data_type'}), '(shape, dtype=data_type)\n', (12306, 12330), False, 'import numpy\n'), ((12404, 12438), 'numpy.ones', 'numpy.ones', (['shape'], {'dtype': 'data_type'}), '(shape, dtype=data_type)\n', (12414, 12438), False, 'import numpy\n')]
|
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ops for evaluating the skip-thoughts model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class _ComputeLengthNormsHook(tf.train.SessionRunHook):
"""Hook to compute average norm for different input lengths."""
def __init__(self,
thought_vector_tensor,
mask_tensor,
log_dir=None,
summary_writer=None,
min_global_step=None):
"""Initializes the ComputeLengthNormsHook monitor.
Args:
thought_vector_tensor: A Tensor of shape B x D, where B is batch size,
and D is embedding dimension; output of the encoder GRU or LSTM at each
time-step. Thought vectors for the batch.
mask_tensor: A Tensor of shape B x N; masks specifying padding of the
sequences (from which lengths can be computed).
log_dir: Directory to save summary events to. Used only when
summary_writer is not provided.
summary_writer: A tf.summary.FileWriter to write summary events with.
      min_global_step: If not None, the minimum global step at which to compute
        the length norms. This is used to prevent computing the norms at the
        very start of training.
Raises:
ValueError: If neither log_dir nor summary_writer is provided.
"""
self._thought_vector_tensor = thought_vector_tensor
self._mask_tensor = mask_tensor
self._log_dir = log_dir
self._summary_writer = summary_writer
if self._log_dir is None and self._summary_writer is None:
raise ValueError("One of log_dir or summary_writer should be provided.")
self._min_global_step = min_global_step
self._global_step = tf.train.get_or_create_global_step()
def begin(self):
# Indicates whether global_step >= self._min_global_step.
self._should_run = True
# Accumulators over evaluation batches.
self._length_sum_norms = {}
self._length_counts = {}
# Initialize the FileWriter.
if self._summary_writer is None and self._log_dir:
self._summary_writer = tf.summary.FileWriterCache.get(self._log_dir)
def after_create_session(self, session, coord): # pylint: disable=unused-argument
global_step = tf.train.global_step(session, self._global_step)
if self._min_global_step and global_step < self._min_global_step:
tf.logging.info("Skipping perplexity evaluation: global step = %d < %d",
global_step, self._min_global_step)
self._should_run = False
def before_run(self, run_context): # pylint: disable=unused-argument
if self._should_run:
return tf.train.SessionRunArgs(
[self._thought_vector_tensor, self._mask_tensor])
def after_run(self, run_context, run_values):
if self._should_run:
thought_vectors, masks = run_values.results
lengths = masks.sum(axis=1)
# Compute norms
thought_vector_norms = np.linalg.norm(thought_vectors, axis=1)
# Bin by length
for i in range(lengths.shape[0]):
length = int(lengths[i])
if length not in self._length_sum_norms:
self._length_sum_norms[length] = 0.0
self._length_counts[length] = 0
self._length_sum_norms[length] += thought_vector_norms[i]
self._length_counts[length] += 1
def end(self, session):
if self._should_run:
for length in [1, 5, 10, 15, 20]:
if length in self._length_sum_norms:
average_norm = (self._length_sum_norms[length] /
self._length_counts[length])
tf.logging.info("Length %d Average Norm = %.4f", length, average_norm)
# Log to the SummaryWriter.
if self._summary_writer:
summary = tf.Summary()
value = summary.value.add()
value.simple_value = average_norm
value.tag = "length_norms/average_norm_%d" % length
global_step = tf.train.global_step(session, self._global_step)
self._summary_writer.add_summary(summary, global_step)
self._summary_writer.flush()
class _ComputePerplexityHook(tf.train.SessionRunHook):
"""Hook to compute per-word perplexity during evaluation."""
def __init__(self,
losses_tensor,
weights_tensor,
log_dir=None,
summary_writer=None,
min_global_step=None):
"""Initializes the ComputePerplexityHook monitor.
Args:
losses_tensor: A Tensor of any shape; the target cross entropy losses for
the current batch.
weights_tensor: A Tensor of weights corresponding to losses.
log_dir: Directory to save summary events to. Used only when
summary_writer is not provided.
summary_writer: A tf.summary.FileWriter to write summary events with.
min_global_step: If not None, the minimum global step at which to compute
perplexity. This is used to prevent computing perplexity at the start
of training, when perplexity may be very large because it's exponential
with respect to loss.
Raises:
ValueError: If neither log_dir nor summary_writer is provided.
"""
self._losses_tensor = losses_tensor
self._weights_tensor = weights_tensor
self._log_dir = log_dir
self._summary_writer = summary_writer
if self._log_dir is None and self._summary_writer is None:
raise ValueError("One of log_dir or summary_writer should be provided.")
self._min_global_step = min_global_step
self._global_step = tf.train.get_or_create_global_step()
def begin(self):
# Indicates whether global_step >= self._min_global_step.
self._should_run = True
# Accumulators over evaluation batches.
self._sum_losses = 0.0
self._sum_weights = 0.0
self._sum_correct = 0.0
# Initialize the FileWriter.
if self._summary_writer is None and self._log_dir:
self._summary_writer = tf.summary.FileWriterCache.get(self._log_dir)
def after_create_session(self, session, coord): # pylint: disable=unused-argument
global_step = tf.train.global_step(session, self._global_step)
if self._min_global_step and global_step < self._min_global_step:
tf.logging.info("Skipping perplexity evaluation: global step = %d < %d",
global_step, self._min_global_step)
self._should_run = False
def before_run(self, run_context): # pylint: disable=unused-argument
if self._should_run:
return tf.train.SessionRunArgs(
[self._losses_tensor, self._weights_tensor])
def after_run(self, run_context, run_values):
if self._should_run:
losses, weights = run_values.results
self._sum_losses += np.sum(losses * weights)
self._sum_weights += np.sum(weights)
def end(self, session):
if self._should_run and self._sum_weights > 0:
perplexity = float(np.exp(self._sum_losses / self._sum_weights))
tf.logging.info("Perplexity = %.4f", perplexity)
# Log perplexity, accuracy, total loss to the SummaryWriter.
if self._summary_writer:
summary = tf.Summary()
value = summary.value.add()
value.simple_value = perplexity
value.tag = "perplexity"
value = summary.value.add()
value.simple_value = self._sum_losses
value.tag = "total_loss"
global_step = tf.train.global_step(session, self._global_step)
self._summary_writer.add_summary(summary, global_step)
self._summary_writer.flush()
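# Illustrative note (not part of the original module): the per-word perplexity
# logged above is exp(sum_losses / sum_weights), i.e. the exponential of the
# weighted-average cross entropy. For example, sum_losses = 460.5 accumulated
# over sum_weights = 100 target words gives exp(4.605) ~= 100.0.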
def evaluate_repeatedly(model,
checkpoint_dir,
eval_dir,
num_eval_examples,
min_global_step_for_perplexity=None,
master="",
eval_interval_secs=600):
"""Repeatedly searches for a checkpoint in checkpoint_dir and evaluates it.
Args:
model: A built instance of SkipThoughtsModel.
checkpoint_dir: Directory containing model checkpoints.
eval_dir: Directory to save summary events to.
num_eval_examples: Number of examples for evaluation.
min_global_step_for_perplexity: If not None, the minimum global step at
which to compute perplexity. This is used to prevent computing perplexity
at the start of training, when perplexity may be very large because it's
exponential with respect to loss.
master: Name of the TensorFlow master.
eval_interval_secs: Interval (in seconds) between evaluation runs.
"""
# Number of batches to evaluate.
num_eval_batches = int(np.ceil(num_eval_examples / model.config.batch_size))
losses_tensor = tf.concat(model.target_cross_entropy_losses, 0)
weights_tensor = tf.concat(model.target_cross_entropy_loss_weights, 0)
thought_vector_tensor = model.thought_vectors
mask_tensor = model.encode_mask
hooks = [
# Run num_eval_batches iterations.
tf.contrib.training.StopAfterNEvalsHook(num_eval_batches),
# Save a summary at the end.
tf.contrib.training.SummaryAtEndHook(log_dir=eval_dir),
# Compute per-word perplexity over the evaluation set.
_ComputePerplexityHook(
losses_tensor,
weights_tensor,
log_dir=eval_dir,
min_global_step=min_global_step_for_perplexity),
# Compute average norms for different sequence lengths.
_ComputeLengthNormsHook(
thought_vector_tensor,
mask_tensor,
log_dir=eval_dir,
min_global_step=min_global_step_for_perplexity),
]
tf.contrib.training.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
master=master,
eval_ops=[losses_tensor, weights_tensor,
mask_tensor, thought_vector_tensor],
eval_interval_secs=eval_interval_secs,
hooks=hooks)
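# Minimal usage sketch (illustrative; the paths and example counts below are
# placeholders, not taken from this module):
#
#   evaluate_repeatedly(model,
#                       checkpoint_dir="/tmp/skip_thoughts/train",
#                       eval_dir="/tmp/skip_thoughts/eval",
#                       num_eval_examples=50000,
#                       min_global_step_for_perplexity=100)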
|
[
"numpy.sum",
"numpy.ceil",
"tensorflow.summary.FileWriterCache.get",
"tensorflow.logging.info",
"tensorflow.train.SessionRunArgs",
"tensorflow.contrib.training.StopAfterNEvalsHook",
"tensorflow.Summary",
"tensorflow.train.get_or_create_global_step",
"tensorflow.contrib.training.SummaryAtEndHook",
"tensorflow.concat",
"numpy.linalg.norm",
"numpy.exp",
"tensorflow.contrib.training.evaluate_repeatedly",
"tensorflow.train.global_step"
] |
[((9343, 9390), 'tensorflow.concat', 'tf.concat', (['model.target_cross_entropy_losses', '(0)'], {}), '(model.target_cross_entropy_losses, 0)\n', (9352, 9390), True, 'import tensorflow as tf\n'), ((9410, 9463), 'tensorflow.concat', 'tf.concat', (['model.target_cross_entropy_loss_weights', '(0)'], {}), '(model.target_cross_entropy_loss_weights, 0)\n', (9419, 9463), True, 'import tensorflow as tf\n'), ((10235, 10458), 'tensorflow.contrib.training.evaluate_repeatedly', 'tf.contrib.training.evaluate_repeatedly', ([], {'checkpoint_dir': 'checkpoint_dir', 'master': 'master', 'eval_ops': '[losses_tensor, weights_tensor, mask_tensor, thought_vector_tensor]', 'eval_interval_secs': 'eval_interval_secs', 'hooks': 'hooks'}), '(checkpoint_dir=checkpoint_dir,\n master=master, eval_ops=[losses_tensor, weights_tensor, mask_tensor,\n thought_vector_tensor], eval_interval_secs=eval_interval_secs, hooks=hooks)\n', (10274, 10458), True, 'import tensorflow as tf\n'), ((2435, 2471), 'tensorflow.train.get_or_create_global_step', 'tf.train.get_or_create_global_step', ([], {}), '()\n', (2469, 2471), True, 'import tensorflow as tf\n'), ((2956, 3004), 'tensorflow.train.global_step', 'tf.train.global_step', (['session', 'self._global_step'], {}), '(session, self._global_step)\n', (2976, 3004), True, 'import tensorflow as tf\n'), ((6253, 6289), 'tensorflow.train.get_or_create_global_step', 'tf.train.get_or_create_global_step', ([], {}), '()\n', (6287, 6289), True, 'import tensorflow as tf\n'), ((6796, 6844), 'tensorflow.train.global_step', 'tf.train.global_step', (['session', 'self._global_step'], {}), '(session, self._global_step)\n', (6816, 6844), True, 'import tensorflow as tf\n'), ((9270, 9322), 'numpy.ceil', 'np.ceil', (['(num_eval_examples / model.config.batch_size)'], {}), '(num_eval_examples / model.config.batch_size)\n', (9277, 9322), True, 'import numpy as np\n'), ((9607, 9664), 'tensorflow.contrib.training.StopAfterNEvalsHook', 'tf.contrib.training.StopAfterNEvalsHook', (['num_eval_batches'], {}), '(num_eval_batches)\n', (9646, 9664), True, 'import tensorflow as tf\n'), ((9707, 9761), 'tensorflow.contrib.training.SummaryAtEndHook', 'tf.contrib.training.SummaryAtEndHook', ([], {'log_dir': 'eval_dir'}), '(log_dir=eval_dir)\n', (9743, 9761), True, 'import tensorflow as tf\n'), ((2806, 2851), 'tensorflow.summary.FileWriterCache.get', 'tf.summary.FileWriterCache.get', (['self._log_dir'], {}), '(self._log_dir)\n', (2836, 2851), True, 'import tensorflow as tf\n'), ((3081, 3193), 'tensorflow.logging.info', 'tf.logging.info', (['"""Skipping perplexity evaluation: global step = %d < %d"""', 'global_step', 'self._min_global_step'], {}), "('Skipping perplexity evaluation: global step = %d < %d',\n global_step, self._min_global_step)\n", (3096, 3193), True, 'import tensorflow as tf\n'), ((3354, 3427), 'tensorflow.train.SessionRunArgs', 'tf.train.SessionRunArgs', (['[self._thought_vector_tensor, self._mask_tensor]'], {}), '([self._thought_vector_tensor, self._mask_tensor])\n', (3377, 3427), True, 'import tensorflow as tf\n'), ((3649, 3688), 'numpy.linalg.norm', 'np.linalg.norm', (['thought_vectors'], {'axis': '(1)'}), '(thought_vectors, axis=1)\n', (3663, 3688), True, 'import numpy as np\n'), ((6646, 6691), 'tensorflow.summary.FileWriterCache.get', 'tf.summary.FileWriterCache.get', (['self._log_dir'], {}), '(self._log_dir)\n', (6676, 6691), True, 'import tensorflow as tf\n'), ((6921, 7033), 'tensorflow.logging.info', 'tf.logging.info', (['"""Skipping perplexity evaluation: global step = %d < %d"""', 'global_step', 
'self._min_global_step'], {}), "('Skipping perplexity evaluation: global step = %d < %d',\n global_step, self._min_global_step)\n", (6936, 7033), True, 'import tensorflow as tf\n'), ((7194, 7262), 'tensorflow.train.SessionRunArgs', 'tf.train.SessionRunArgs', (['[self._losses_tensor, self._weights_tensor]'], {}), '([self._losses_tensor, self._weights_tensor])\n', (7217, 7262), True, 'import tensorflow as tf\n'), ((7417, 7441), 'numpy.sum', 'np.sum', (['(losses * weights)'], {}), '(losses * weights)\n', (7423, 7441), True, 'import numpy as np\n'), ((7469, 7484), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (7475, 7484), True, 'import numpy as np\n'), ((7640, 7688), 'tensorflow.logging.info', 'tf.logging.info', (['"""Perplexity = %.4f"""', 'perplexity'], {}), "('Perplexity = %.4f', perplexity)\n", (7655, 7688), True, 'import tensorflow as tf\n'), ((7588, 7632), 'numpy.exp', 'np.exp', (['(self._sum_losses / self._sum_weights)'], {}), '(self._sum_losses / self._sum_weights)\n', (7594, 7632), True, 'import numpy as np\n'), ((7806, 7818), 'tensorflow.Summary', 'tf.Summary', ([], {}), '()\n', (7816, 7818), True, 'import tensorflow as tf\n'), ((8065, 8113), 'tensorflow.train.global_step', 'tf.train.global_step', (['session', 'self._global_step'], {}), '(session, self._global_step)\n', (8085, 8113), True, 'import tensorflow as tf\n'), ((4291, 4361), 'tensorflow.logging.info', 'tf.logging.info', (['"""Length %d Average Norm = %.4f"""', 'length', 'average_norm'], {}), "('Length %d Average Norm = %.4f', length, average_norm)\n", (4306, 4361), True, 'import tensorflow as tf\n'), ((4458, 4470), 'tensorflow.Summary', 'tf.Summary', ([], {}), '()\n', (4468, 4470), True, 'import tensorflow as tf\n'), ((4647, 4695), 'tensorflow.train.global_step', 'tf.train.global_step', (['session', 'self._global_step'], {}), '(session, self._global_step)\n', (4667, 4695), True, 'import tensorflow as tf\n')]
|
import numpy as np
from abc import ABC, abstractmethod
from aos.state import State
class Metric(ABC):
"""
A class to represent a metric of the optical state.
Eventually will have non-trivial image quality metrics ...
"""
@abstractmethod
def evaluate(self, x):
"""
Evaluates the metric on state x.
Parameters
----------
x : aos.state.State | numpy.ndarray[float]
Optical state.
"""
pass
class SumOfSquares(Metric):
"""
A sum of squares metric.
"""
def evaluate(self, x):
"""
Computes the sum of squares of state x.
Parameters
----------
x : aos.state.State | numpy.ndarray[float]
Optical state.
Returns
-------
float
Sum of squares.
"""
if isinstance(x, State):
x = x.array
return np.sum(x ** 2)
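# Minimal usage sketch (illustrative only; not part of the original module).
if __name__ == '__main__':
    # A hand-made state vector: 1^2 + 2^2 + 2^2 = 9.
    print(SumOfSquares().evaluate(np.array([1.0, 2.0, 2.0])))  # -> 9.0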
|
[
"numpy.sum"
] |
[((920, 934), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (926, 934), True, 'import numpy as np\n')]
|
from lxml import html as lh
import requests
import pandas as pd
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
from io import StringIO
from scipy import optimize
from scipy.optimize import curve_fit
# define your function:
def gauss_func(x, height, mu, sigma): return height * np.exp(-((x-mu)**2/(2*sigma**2)))
def line_func(x, m, b): return (m * x) + b
def fetch_data_from_data_dot_gouv_website(data_url):
page = requests.get(data_url)
# Store the contents of the website under doc
doc = lh.fromstring(page.content)
filename_element = doc.xpath('/html/body/section/main/section[4]/div[2]/div[1]/article[2]/div/header/div[2]/h4')
filename = filename_element[0].text.split('-')
print(filename)
# current_data_date = datetime.strptime("".join(filename[3:7]), '%Y%m%d%Hh%M')
csv_link_element = doc.xpath('/html/body/section/main/section[4]/div[2]/div[1]/article[3]/div/section/dl/div[2]/dd/a')
csv_link = csv_link_element[0].attrib['href']
# if (max_saved_date + pd.Timedelta('0 days')) < pd.to_datetime(datetime.today().strftime('%Y-%m-%d')):
with requests.Session() as s:
download = s.get(csv_link)
decoded_content = download.content.decode("utf-8")
df = pd.read_csv(StringIO(decoded_content), sep=';')
print(csv_link)
df.to_pickle('raw_hospitalizations.pkl')
return df
def gaussian_fit_data(s):
data = s.dropna().values
data_x = s.dropna().index.astype('int').values
popt, pcov = curve_fit(
gauss_func,
data_x,
data,
p0=[100, pd.Timestamp('2020-03-30').value, pd.to_timedelta('2 days').value]
)
return popt[0] * np.exp(-((s.index.astype('int') - popt[1]) ** 2 / (2 * popt[2] ** 2)))
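# Illustrative sanity check of gauss_func/curve_fit on synthetic data (not part
# of the original analysis; all numbers below are made up):
#
#   x = np.linspace(-5, 5, 101)
#   y = gauss_func(x, height=3.0, mu=1.0, sigma=0.8)
#   popt, _ = curve_fit(gauss_func, x, y, p0=[1.0, 0.0, 1.0])
#   # popt ~= [3.0, 1.0, 0.8]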
# data_url = 'https://www.data.gouv.fr/fr/datasets/donnees-des-urgences-hospitalieres-et-de-sos-medecins-relatives-a-lepidemie-de-covid-19/'
data_url = 'https://www.data.gouv.fr/fr/datasets/donnees-hospitalieres-relatives-a-lepidemie-de-covid-19/'
raw = fetch_data_from_data_dot_gouv_website(data_url)
raw.jour = raw.jour.astype('datetime64')
raw.set_index('jour', inplace=True)
covid = pd.DataFrame()
covid['Paris_reanimation'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '75')\
.rea.dropna()
covid['Marseille_reanimation'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '13')\
.rea.dropna()
covid['Lyon_reanimation'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '69')\
.rea.dropna()
covid['Savoie_reanimation'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '74')\
.rea.dropna()
covid['Paris_hospital'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '75')\
.hosp.dropna()
covid['Marseille_hospital'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '13')\
.hosp.dropna()
covid['Lyon_hospital'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '69')\
.hosp.dropna()
covid['Savoie_hospital'] = raw.where(raw.sexe == 0)\
.where(raw.dep == '74')\
.hosp.dropna()
# covid['Lyon'] = raw.where(raw.sexe == 0)\
# .where(raw.dep == '69')\
# .rad.dropna().diff()
covid['France'] = raw.where(raw.sexe == 0).dropna().hosp.resample('D').sum().diff()
# covid.reset_index(inplace=True)
# covid = covid.reindex(index=pd.date_range('3-18-2020', '8-1-2020'))
lockdown_start = pd.to_datetime('3-16-2020')
lockdown_end = pd.to_datetime('5-10-2020')
# lockdown_end = pd.to_datetime(datetime.today().strftime('%Y-%m-%d'))
lockdown2_start = pd.to_datetime('10-30-2020')
lockdown2_end = pd.to_datetime('12-1-2020')
lockdown3_start = pd.to_datetime('4-3-2021')
lockdown3_end = pd.to_datetime('4-25-2021')
title = "Currently in Hospital"
reanimations = ['Paris_reanimation', 'Marseille_reanimation', 'Lyon_reanimation', 'Savoie_reanimation']
hospitals = ['Paris_hospital', 'Marseille_hospital', 'Lyon_hospital', 'Savoie_hospital']
axes = covid.plot(y=reanimations, legend=True,
title=title, grid=True, figsize=(20, 15), subplots=True, sharex=True, sharey=False)
hospital = 0
for ax1 in axes:
ax1.axvspan(lockdown_start, lockdown_end, facecolor='0.1', alpha=0.2)
ax1.axvspan(lockdown2_start, lockdown2_end, facecolor='0.1', alpha=0.2)
ax1.axvspan(lockdown3_start, lockdown3_end, facecolor='0.1', alpha=0.2)
covid.plot(y=hospitals[hospital], secondary_y=True, legend=True, ax=ax1, lw=5, sharey=True)#.\
hospital += 1
# get_figure().savefig('hospitalizations.png')
plt.tight_layout()
plt.savefig('hospitalizations.png')
# covid.plot(style='k--', y=['Paris_fit', 'Bordeaux_fit', 'Strasbourg_fit', 'Marseilles_fit', 'Lyon_fit'], ax=ax1, legend=False).\
# get_figure().savefig('hospitalizations.png')
# covid.plot(style='k--', y=['France_fit', ], secondary_y=True, ax=ax1, legend=False)
# covid.plot(y="France", legend=True, secondary_y=True, ax=ax1, lw=4, grid=True, style='r').get_figure().savefig('hospitalizations.png')
# covid.plot(y=["Paris_fit"], style='.', ax=ax1)
# raw.where(raw.sursaud_cl_age_corona == '0')\
# .where(raw.dep == '75')\
# .nbre_hospit_corona.dropna()\
# .cumsum()\
# .plot(
# label='Paris',
# # kind='bar',
# legend=True,
# figsize=(20, 15),
# title='Hospitalizations per day'
# )
#
# raw.where(raw.sursaud_cl_age_corona == '0')\
# .where(raw.dep == '13')\
# .nbre_hospit_corona.dropna()\
# .plot(
# label='Marseilles',
# secondary_y=True,
# legend=True
# ).get_figure().savefig('hospitalizations.png')
|
[
"pandas.DataFrame",
"io.StringIO",
"pandas.Timestamp",
"requests.Session",
"lxml.html.fromstring",
"pandas.to_timedelta",
"pandas.to_datetime",
"numpy.exp",
"requests.get",
"matplotlib.pyplot.savefig"
] |
[((2141, 2155), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2153, 2155), True, 'import pandas as pd\n'), ((3281, 3308), 'pandas.to_datetime', 'pd.to_datetime', (['"""3-16-2020"""'], {}), "('3-16-2020')\n", (3295, 3308), True, 'import pandas as pd\n'), ((3324, 3351), 'pandas.to_datetime', 'pd.to_datetime', (['"""5-10-2020"""'], {}), "('5-10-2020')\n", (3338, 3351), True, 'import pandas as pd\n'), ((3442, 3470), 'pandas.to_datetime', 'pd.to_datetime', (['"""10-30-2020"""'], {}), "('10-30-2020')\n", (3456, 3470), True, 'import pandas as pd\n'), ((3487, 3514), 'pandas.to_datetime', 'pd.to_datetime', (['"""12-1-2020"""'], {}), "('12-1-2020')\n", (3501, 3514), True, 'import pandas as pd\n'), ((3534, 3560), 'pandas.to_datetime', 'pd.to_datetime', (['"""4-3-2021"""'], {}), "('4-3-2021')\n", (3548, 3560), True, 'import pandas as pd\n'), ((3577, 3604), 'pandas.to_datetime', 'pd.to_datetime', (['"""4-25-2021"""'], {}), "('4-25-2021')\n", (3591, 3604), True, 'import pandas as pd\n'), ((4414, 4449), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""hospitalizations.png"""'], {}), "('hospitalizations.png')\n", (4425, 4449), True, 'import matplotlib.pyplot as plt\n'), ((460, 482), 'requests.get', 'requests.get', (['data_url'], {}), '(data_url)\n', (472, 482), False, 'import requests\n'), ((543, 570), 'lxml.html.fromstring', 'lh.fromstring', (['page.content'], {}), '(page.content)\n', (556, 570), True, 'from lxml import html as lh\n'), ((314, 357), 'numpy.exp', 'np.exp', (['(-((x - mu) ** 2 / (2 * sigma ** 2)))'], {}), '(-((x - mu) ** 2 / (2 * sigma ** 2)))\n', (320, 357), True, 'import numpy as np\n'), ((1132, 1150), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1148, 1150), False, 'import requests\n'), ((1268, 1293), 'io.StringIO', 'StringIO', (['decoded_content'], {}), '(decoded_content)\n', (1276, 1293), False, 'from io import StringIO\n'), ((1587, 1613), 'pandas.Timestamp', 'pd.Timestamp', (['"""2020-03-30"""'], {}), "('2020-03-30')\n", (1599, 1613), True, 'import pandas as pd\n'), ((1621, 1646), 'pandas.to_timedelta', 'pd.to_timedelta', (['"""2 days"""'], {}), "('2 days')\n", (1636, 1646), True, 'import pandas as pd\n')]
|
import numpy as np
from copy import deepcopy
from model.root.root_algo import RootAlgo
class BaseGA(RootAlgo):
"""
Link:
https://blog.sicara.com/getting-started-genetic-algorithms-python-tutorial-81ffa1dd72f9
https://www.tutorialspoint.com/genetic_algorithms/genetic_algorithms_quick_guide.htm
https://www.analyticsvidhya.com/blog/2017/07/introduction-to-genetic-algorithm/
"""
ID_FITNESS = 1
def __init__(self, root_algo_paras=None, ga_paras = None):
RootAlgo.__init__(self, root_algo_paras)
self.epoch = ga_paras["epoch"]
self.pop_size = ga_paras["pop_size"]
self.pc = ga_paras["pc"]
self.pm = ga_paras["pm"]
### Selection
def _get_index_roulette_wheel_selection__(self, list_fitness, sum_fitness):
r = np.random.uniform(low=0, high=sum_fitness)
for idx, f in enumerate(list_fitness):
r = r + f
if r > sum_fitness:
return idx
### Crossover
def _crossover_arthmetic_recombination__(self, dad=None, mom=None):
r = np.random.uniform() # w1 = w2 when r =0.5
w1 = np.multiply(r, dad) + np.multiply((1 - r), mom)
w2 = np.multiply(r, mom) + np.multiply((1 - r), dad)
return w1, w2
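    # Illustrative note (not part of the original class): with r = 0.25,
    # dad = [1.0, 2.0] and mom = [3.0, 4.0], the recombination above gives
    # w1 = 0.25*dad + 0.75*mom = [2.5, 3.5] and w2 = 0.25*mom + 0.75*dad = [1.5, 2.5],
    # i.e. two children lying on the line segment between the parents.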
### Mutation
def _mutation_flip_point__(self, parent, index):
w = deepcopy(parent)
w[index] = np.random.uniform(self.domain_range[0], self.domain_range[1])
return w
def _create_next_generation__(self, pop):
next_population = []
list_fitness = [pop[i][self.ID_FITNESS][self.ID_ERROR] for i in range(self.pop_size)]
fitness_sum = sum(list_fitness)
while (len(next_population) < self.pop_size):
### Selection
c1 = pop[self._get_index_roulette_wheel_selection__(list_fitness, fitness_sum)]
c2 = pop[self._get_index_roulette_wheel_selection__(list_fitness, fitness_sum)]
w1, w2 = deepcopy(c1[0]), deepcopy(c2[0])
### Crossover
if np.random.uniform() < self.pc:
w1, w2 = self._crossover_arthmetic_recombination__(c1[0], c2[0])
### Mutation
for id in range(0, self.problem_size):
if np.random.uniform() < self.pm:
w1 = self._mutation_flip_point__(w1, id)
if np.random.uniform() < self.pm:
w2 = self._mutation_flip_point__(w2, id)
c1_new = [w1, self._fitness_model__(w1, minmax=1)]
c2_new = [w2, self._fitness_model__(w2, minmax=1)]
next_population.append(c1_new)
next_population.append(c2_new)
return next_population
def _train__(self):
best_train = [None, [-1, -1] ]
pop = [ self._create_solution__(minmax=1) for _ in range(self.pop_size) ]
for j in range(0, self.epoch):
# Next generations
pop = deepcopy(self._create_next_generation__(pop))
current_best = self._get_global_best__(pop=pop, id_fitness=self.ID_FITNESS, id_best=self.ID_MAX_PROBLEM)
if current_best[self.ID_FITNESS][self.ID_ERROR] > best_train[self.ID_FITNESS][self.ID_ERROR]:
best_train = current_best
if self.print_train:
print("> Epoch {0}: Best training fitness {1}".format(j + 1, 1.0 / best_train[self.ID_FITNESS][self.ID_ERROR]))
self.loss_train.append(np.power(best_train[self.ID_FITNESS], -1))
return best_train[0], self.loss_train
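# Minimal usage sketch (illustrative; the contents of root_algo_paras are an
# assumption about what the RootAlgo base class expects, not taken from this
# file):
#
#   ga_paras = {"epoch": 100, "pop_size": 50, "pc": 0.95, "pm": 0.025}
#   md = BaseGA(root_algo_paras=root_algo_paras, ga_paras=ga_paras)
#   best_solution, loss_history = md._train__()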
|
[
"numpy.random.uniform",
"copy.deepcopy",
"numpy.multiply",
"numpy.power",
"model.root.root_algo.RootAlgo.__init__"
] |
[((506, 546), 'model.root.root_algo.RootAlgo.__init__', 'RootAlgo.__init__', (['self', 'root_algo_paras'], {}), '(self, root_algo_paras)\n', (523, 546), False, 'from model.root.root_algo import RootAlgo\n'), ((808, 850), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0)', 'high': 'sum_fitness'}), '(low=0, high=sum_fitness)\n', (825, 850), True, 'import numpy as np\n'), ((1082, 1101), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (1099, 1101), True, 'import numpy as np\n'), ((1363, 1379), 'copy.deepcopy', 'deepcopy', (['parent'], {}), '(parent)\n', (1371, 1379), False, 'from copy import deepcopy\n'), ((1399, 1460), 'numpy.random.uniform', 'np.random.uniform', (['self.domain_range[0]', 'self.domain_range[1]'], {}), '(self.domain_range[0], self.domain_range[1])\n', (1416, 1460), True, 'import numpy as np\n'), ((1149, 1168), 'numpy.multiply', 'np.multiply', (['r', 'dad'], {}), '(r, dad)\n', (1160, 1168), True, 'import numpy as np\n'), ((1171, 1194), 'numpy.multiply', 'np.multiply', (['(1 - r)', 'mom'], {}), '(1 - r, mom)\n', (1182, 1194), True, 'import numpy as np\n'), ((1210, 1229), 'numpy.multiply', 'np.multiply', (['r', 'mom'], {}), '(r, mom)\n', (1221, 1229), True, 'import numpy as np\n'), ((1232, 1255), 'numpy.multiply', 'np.multiply', (['(1 - r)', 'dad'], {}), '(1 - r, dad)\n', (1243, 1255), True, 'import numpy as np\n'), ((1975, 1990), 'copy.deepcopy', 'deepcopy', (['c1[0]'], {}), '(c1[0])\n', (1983, 1990), False, 'from copy import deepcopy\n'), ((1992, 2007), 'copy.deepcopy', 'deepcopy', (['c2[0]'], {}), '(c2[0])\n', (2000, 2007), False, 'from copy import deepcopy\n'), ((2049, 2068), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (2066, 2068), True, 'import numpy as np\n'), ((3447, 3488), 'numpy.power', 'np.power', (['best_train[self.ID_FITNESS]', '(-1)'], {}), '(best_train[self.ID_FITNESS], -1)\n', (3455, 3488), True, 'import numpy as np\n'), ((2257, 2276), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (2274, 2276), True, 'import numpy as np\n'), ((2368, 2387), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (2385, 2387), True, 'import numpy as np\n')]
|
from __future__ import absolute_import
import os
import numpy as np
import tensorflow as tf
from experiments.scripts.pickle_wrapper import save_pkl, load_pkl
from .ops import simple_linear, select_action_tf, clipped_error
from .alpha_vector import AlphaVector
from .base_tf_solver import BaseTFSolver
class LinearAlphaNet(BaseTFSolver):
"""
Linear Alpha Network
- linear FA for alpha vectors
- 6 inputs (r(s,a))
- 6 outputs (1 hyperplane per action)
"""
def __init__(self, agent, sess):
super(LinearAlphaNet, self).__init__(agent, sess)
self.ops = {}
self.w = {}
self.summary_ops = {}
self.summary_placeholders = {}
self.w_input = {}
self.w_assign_op = {}
self.build_linear_network()
with tf.variable_scope('step'):
self.step_op = tf.Variable(0, trainable=False, name='step')
self.step_input = tf.placeholder('int32', None, name='step_input')
self.step_assign_op = self.step_op.assign(self.step_input)
tf.global_variables_initializer().run()
@staticmethod
def reset(agent, sess):
return LinearAlphaNet(agent, sess)
def train(self, epoch):
start_step = self.step_assign_op.eval({self.step_input: epoch * self.model.max_steps})
total_reward, avg_reward_per_step, total_loss, total_v, total_delta = 0., 0., 0., 0., 0.
actions = []
# Reset for new run
belief = self.model.get_initial_belief_state()
for step in range(start_step, start_step + self.model.max_steps):
# 1. predict
action, pred_v = self.e_greedy_predict(belief, step)
# 2. act
step_result = self.model.generate_step(action)
if step_result.is_terminal:
v_b_next = np.array([0.])
else:
next_belief = self.model.belief_update(belief, action, step_result.observation)
# optionally clip reward
# generate target
_, v_b_next = self.greedy_predict(next_belief)
target_v = self.model.discount * (step_result.reward + v_b_next)
# compute gradient and do weight update
_, loss, delta = self.gradients(target_v, belief, step)
total_loss += loss
total_reward += step_result.reward
total_v += pred_v[0]
total_delta += delta[0]
if step_result.is_terminal:
# Reset for new run
belief = self.model.get_initial_belief_state()
actions.append(action)
avg_reward_per_step = total_reward / (step + 1.)
            avg_loss = total_loss / (step + 1.)
avg_v = total_v / (step + 1.)
avg_delta = total_delta / (step + 1.)
self.step_assign_op.eval({self.step_input: step + 1})
self.inject_summary({
'average.reward': avg_reward_per_step,
'average.loss': avg_loss,
'average.v': avg_v,
'average.delta': avg_delta,
'training.weights': self.sess.run(self.w['l1_w'], feed_dict={
self.ops['l0_in']: np.reshape(self.model.get_reward_matrix().flatten(), [1, 6]),
self.ops['belief']: belief}),
'training.learning_rate': self.ops['learning_rate_op'].eval(
{self.ops['learning_rate_step']: step + 1}),
'training.epsilon': self.ops['epsilon_op'].eval(
{self.ops['epsilon_step']: step + 1})
}, step + 1)
def e_greedy_predict(self, belief, epsilon_step):
# try hard-coding input of linear net to be rewards (can try random as well)
action, v_b, epsilon = self.sess.run([self.ops['a'], self.ops['v_b'], self.ops['epsilon_op']],
feed_dict={
self.ops['l0_in']: np.reshape(self.model.get_reward_matrix().flatten(), [1, 6]),
self.ops['belief']: belief,
self.ops['epsilon_step']: epsilon_step})
# e-greedy action selection
if np.random.uniform(0, 1) < epsilon:
action = np.random.randint(self.model.num_actions)
return action, v_b
def greedy_predict(self, belief):
# try hard-coding input of linear net to be rewards (can try random as well)
action, v_b = self.sess.run([self.ops['a'], self.ops['v_b']],
feed_dict={
self.ops['l0_in']: np.reshape(self.model.get_reward_matrix().flatten(), [1, 6]),
self.ops['belief']: belief})
return action, v_b
def gradients(self, target_v, belief, learning_rate_step):
return self.sess.run([self.ops['optim'], self.ops['loss'], self.ops['delta']], feed_dict={
self.ops['target_v']: target_v,
self.ops['l0_in']: np.reshape(self.model.get_reward_matrix().flatten(), [1, 6]),
self.ops['belief']: belief,
self.ops['learning_rate_step']: learning_rate_step})
def alpha_vectors(self):
gamma = self.sess.run(self.ops['l1_out'], feed_dict={
self.ops['l0_in']: np.reshape(self.model.get_reward_matrix().flatten(), [1, 6]),
self.ops['belief']: self.model.get_initial_belief_state()
})
gamma = np.reshape(gamma, [self.model.num_actions, self.model.num_states])
vector_set = set()
for i in range(self.model.num_actions):
vector_set.add(AlphaVector(a=i, v=gamma[i]))
return vector_set
def build_linear_network(self):
with tf.variable_scope('linear_fa_prediction'):
self.ops['belief'] = tf.placeholder('float32', [self.model.num_states], name='belief_input')
with tf.name_scope('linear_layer'):
self.ops['l0_in'] = tf.placeholder('float32', [1, self.model.num_states *
self.model.num_actions], name='input')
self.ops['l1_out'], self.w['l1_w'], self.w['l1_b'] = simple_linear(self.ops['l0_in'],
activation_fn=None, name='weights')
self.ops['l1_out'] = tf.reshape(self.ops['l1_out'], [self.model.num_actions,
self.model.num_states], name='output')
with tf.variable_scope('action_selection'):
vector_set = set()
for i in range(self.model.num_actions):
vector_set.add(AlphaVector(a=i, v=self.ops['l1_out'][i, :]))
self.ops['a'], self.ops['v_b'] = select_action_tf(self.ops['belief'], vector_set)
with tf.variable_scope('epsilon_greedy'):
self.ops['epsilon_step'] = tf.placeholder('int64', None, name='epsilon_step')
self.ops['epsilon_op'] = tf.maximum(self.model.epsilon_minimum,
tf.train.exponential_decay(
self.model.epsilon_start,
self.ops['epsilon_step'],
self.model.epsilon_decay_step,
self.model.epsilon_decay,
staircase=True))
with tf.variable_scope('linear_optimizer'):
# MSE loss function
self.ops['target_v'] = tf.placeholder('float32', [None], name='target_v')
self.ops['delta'] = self.ops['target_v'] - self.ops['v_b']
# self.ops['clipped_delta'] = tf.clip_by_value(self.ops['delta'], -1, 1, name='clipped_delta')
# L2 regularization
self.ops['loss'] = tf.reduce_mean(clipped_error(self.ops['delta']) +
self.model.beta * tf.nn.l2_loss(self.w['l1_w']) +
self.model.beta * tf.nn.l2_loss(self.w['l1_b']), name='loss')
self.ops['learning_rate_step'] = tf.placeholder('int64', None, name='learning_rate_step')
self.ops['learning_rate_op'] = tf.maximum(self.model.learning_rate_minimum,
tf.train.exponential_decay(
self.model.learning_rate,
self.ops['learning_rate_step'],
self.model.learning_rate_decay_step,
self.model.learning_rate_decay,
staircase=True))
self.ops['optim'] = tf.train.MomentumOptimizer(self.ops['learning_rate_op'], momentum=0.8,
name='Optimizer'). \
minimize(self.ops['loss'])
with tf.variable_scope('linear_fa_summary'):
scalar_summary_tags = ['average.reward', 'average.loss', 'average.v',
'average.delta', 'training.learning_rate', 'training.epsilon']
for tag in scalar_summary_tags:
self.summary_placeholders[tag] = tf.placeholder('float32', None, name=tag.replace(' ', '_'))
self.summary_ops['{}'.format(tag)] = tf.summary.scalar('{}'.format(tag),
self.summary_placeholders[tag])
self.summary_placeholders['training.weights'] = tf.placeholder('float32', [1, 6],
name='training_weights')
self.summary_ops['training.weights'] = tf.summary.histogram('weights',
self.summary_placeholders['training.weights'])
self.summary_ops['writer'] = tf.summary.FileWriter(self.model.logs, self.sess.graph)
self.summary_ops['saver'] = tf.train.Saver(self.w, max_to_keep=30)
self.load_model()
def inject_summary(self, tag_dict, step):
summary_str_lists = self.sess.run([self.summary_ops['{}'.format(tag)] for tag in tag_dict.keys()], feed_dict={
self.summary_placeholders[tag]: value for tag, value in tag_dict.items()
})
for summary_str in summary_str_lists:
self.summary_ops['writer'].add_summary(summary_str, step)
def save_weight_to_pkl(self):
if not os.path.exists(self.model.weight_dir):
os.makedirs(self.model.weight_dir)
for name in self.w.keys():
save_pkl(self.w[name].eval(), os.path.join(self.model.weight_dir, "%s.pkl" % name))
def load_weight_from_pkl(self):
with tf.variable_scope('load_pred_from_pkl'):
for name in self.w.keys():
self.w_input[name] = tf.placeholder('float32', self.w[name].get_shape().as_list(), name=name)
self.w_assign_op[name] = self.w[name].assign(self.w_input[name])
for name in self.w.keys():
self.w_assign_op[name].eval({self.w_input[name]: load_pkl(os.path.join(self.model.weight_dir, "%s.pkl" % name))})
def save_alpha_vectors(self):
if not os.path.exists(self.model.weight_dir):
os.makedirs(self.model.weight_dir)
av = self.alpha_vectors()
save_pkl(av, os.path.join(self.model.weight_dir, "linear_alpha_net_vectors.pkl"))
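# Illustrative note (not part of the original solver): each AlphaVector is a
# hyperplane over the belief simplex, and the value of a belief b is the best
# dot product over actions, V(b) = max_a <alpha_a, b>. For example, with two
# states and alpha_0 = [1.0, 0.0], alpha_1 = [0.2, 0.6], the belief
# b = [0.5, 0.5] scores 0.5 for action 0 and 0.4 for action 1, so action 0
# would be selected.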
|
[
"tensorflow.reshape",
"tensorflow.Variable",
"numpy.random.randint",
"os.path.join",
"os.path.exists",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.summary.histogram",
"tensorflow.summary.FileWriter",
"numpy.reshape",
"tensorflow.name_scope",
"tensorflow.train.Saver",
"tensorflow.global_variables_initializer",
"tensorflow.train.MomentumOptimizer",
"tensorflow.train.exponential_decay",
"numpy.random.uniform",
"os.makedirs",
"numpy.array",
"tensorflow.nn.l2_loss"
] |
[((5324, 5390), 'numpy.reshape', 'np.reshape', (['gamma', '[self.model.num_actions, self.model.num_states]'], {}), '(gamma, [self.model.num_actions, self.model.num_states])\n', (5334, 5390), True, 'import numpy as np\n'), ((10170, 10208), 'tensorflow.train.Saver', 'tf.train.Saver', (['self.w'], {'max_to_keep': '(30)'}), '(self.w, max_to_keep=30)\n', (10184, 10208), True, 'import tensorflow as tf\n'), ((796, 821), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""step"""'], {}), "('step')\n", (813, 821), True, 'import tensorflow as tf\n'), ((850, 894), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)', 'name': '"""step"""'}), "(0, trainable=False, name='step')\n", (861, 894), True, 'import tensorflow as tf\n'), ((925, 973), 'tensorflow.placeholder', 'tf.placeholder', (['"""int32"""', 'None'], {'name': '"""step_input"""'}), "('int32', None, name='step_input')\n", (939, 973), True, 'import tensorflow as tf\n'), ((4120, 4143), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (4137, 4143), True, 'import numpy as np\n'), ((4176, 4217), 'numpy.random.randint', 'np.random.randint', (['self.model.num_actions'], {}), '(self.model.num_actions)\n', (4193, 4217), True, 'import numpy as np\n'), ((5599, 5640), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear_fa_prediction"""'], {}), "('linear_fa_prediction')\n", (5616, 5640), True, 'import tensorflow as tf\n'), ((5675, 5746), 'tensorflow.placeholder', 'tf.placeholder', (['"""float32"""', '[self.model.num_states]'], {'name': '"""belief_input"""'}), "('float32', [self.model.num_states], name='belief_input')\n", (5689, 5746), True, 'import tensorflow as tf\n'), ((7467, 7504), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear_optimizer"""'], {}), "('linear_optimizer')\n", (7484, 7504), True, 'import tensorflow as tf\n'), ((7573, 7623), 'tensorflow.placeholder', 'tf.placeholder', (['"""float32"""', '[None]'], {'name': '"""target_v"""'}), "('float32', [None], name='target_v')\n", (7587, 7623), True, 'import tensorflow as tf\n'), ((8167, 8223), 'tensorflow.placeholder', 'tf.placeholder', (['"""int64"""', 'None'], {'name': '"""learning_rate_step"""'}), "('int64', None, name='learning_rate_step')\n", (8181, 8223), True, 'import tensorflow as tf\n'), ((9076, 9114), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear_fa_summary"""'], {}), "('linear_fa_summary')\n", (9093, 9114), True, 'import tensorflow as tf\n'), ((9703, 9761), 'tensorflow.placeholder', 'tf.placeholder', (['"""float32"""', '[1, 6]'], {'name': '"""training_weights"""'}), "('float32', [1, 6], name='training_weights')\n", (9717, 9761), True, 'import tensorflow as tf\n'), ((9885, 9963), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""weights"""', "self.summary_placeholders['training.weights']"], {}), "('weights', self.summary_placeholders['training.weights'])\n", (9905, 9963), True, 'import tensorflow as tf\n'), ((10077, 10132), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['self.model.logs', 'self.sess.graph'], {}), '(self.model.logs, self.sess.graph)\n', (10098, 10132), True, 'import tensorflow as tf\n'), ((10668, 10705), 'os.path.exists', 'os.path.exists', (['self.model.weight_dir'], {}), '(self.model.weight_dir)\n', (10682, 10705), False, 'import os\n'), ((10719, 10753), 'os.makedirs', 'os.makedirs', (['self.model.weight_dir'], {}), '(self.model.weight_dir)\n', (10730, 10753), False, 'import os\n'), ((10936, 10975), 'tensorflow.variable_scope', 'tf.variable_scope', 
(['"""load_pred_from_pkl"""'], {}), "('load_pred_from_pkl')\n", (10953, 10975), True, 'import tensorflow as tf\n'), ((11419, 11456), 'os.path.exists', 'os.path.exists', (['self.model.weight_dir'], {}), '(self.model.weight_dir)\n', (11433, 11456), False, 'import os\n'), ((11470, 11504), 'os.makedirs', 'os.makedirs', (['self.model.weight_dir'], {}), '(self.model.weight_dir)\n', (11481, 11504), False, 'import os\n'), ((11561, 11628), 'os.path.join', 'os.path.join', (['self.model.weight_dir', '"""linear_alpha_net_vectors.pkl"""'], {}), "(self.model.weight_dir, 'linear_alpha_net_vectors.pkl')\n", (11573, 11628), False, 'import os\n'), ((1054, 1087), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1085, 1087), True, 'import tensorflow as tf\n'), ((1824, 1839), 'numpy.array', 'np.array', (['[0.0]'], {}), '([0.0])\n', (1832, 1839), True, 'import numpy as np\n'), ((5765, 5794), 'tensorflow.name_scope', 'tf.name_scope', (['"""linear_layer"""'], {}), "('linear_layer')\n", (5778, 5794), True, 'import tensorflow as tf\n'), ((5832, 5929), 'tensorflow.placeholder', 'tf.placeholder', (['"""float32"""', '[1, self.model.num_states * self.model.num_actions]'], {'name': '"""input"""'}), "('float32', [1, self.model.num_states * self.model.\n num_actions], name='input')\n", (5846, 5929), True, 'import tensorflow as tf\n'), ((6247, 6346), 'tensorflow.reshape', 'tf.reshape', (["self.ops['l1_out']", '[self.model.num_actions, self.model.num_states]'], {'name': '"""output"""'}), "(self.ops['l1_out'], [self.model.num_actions, self.model.\n num_states], name='output')\n", (6257, 6346), True, 'import tensorflow as tf\n'), ((6429, 6466), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""action_selection"""'], {}), "('action_selection')\n", (6446, 6466), True, 'import tensorflow as tf\n'), ((6756, 6791), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""epsilon_greedy"""'], {}), "('epsilon_greedy')\n", (6773, 6791), True, 'import tensorflow as tf\n'), ((6836, 6886), 'tensorflow.placeholder', 'tf.placeholder', (['"""int64"""', 'None'], {'name': '"""epsilon_step"""'}), "('int64', None, name='epsilon_step')\n", (6850, 6886), True, 'import tensorflow as tf\n'), ((8366, 8545), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['self.model.learning_rate', "self.ops['learning_rate_step']", 'self.model.learning_rate_decay_step', 'self.model.learning_rate_decay'], {'staircase': '(True)'}), "(self.model.learning_rate, self.ops[\n 'learning_rate_step'], self.model.learning_rate_decay_step, self.model.\n learning_rate_decay, staircase=True)\n", (8392, 8545), True, 'import tensorflow as tf\n'), ((10832, 10884), 'os.path.join', 'os.path.join', (['self.model.weight_dir', "('%s.pkl' % name)"], {}), "(self.model.weight_dir, '%s.pkl' % name)\n", (10844, 10884), False, 'import os\n'), ((7019, 7180), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['self.model.epsilon_start', "self.ops['epsilon_step']", 'self.model.epsilon_decay_step', 'self.model.epsilon_decay'], {'staircase': '(True)'}), "(self.model.epsilon_start, self.ops[\n 'epsilon_step'], self.model.epsilon_decay_step, self.model.\n epsilon_decay, staircase=True)\n", (7045, 7180), True, 'import tensorflow as tf\n'), ((8861, 8954), 'tensorflow.train.MomentumOptimizer', 'tf.train.MomentumOptimizer', (["self.ops['learning_rate_op']"], {'momentum': '(0.8)', 'name': '"""Optimizer"""'}), "(self.ops['learning_rate_op'], momentum=0.8, name\n ='Optimizer')\n", (8887, 8954), True, 'import 
tensorflow as tf\n'), ((8077, 8106), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (["self.w['l1_b']"], {}), "(self.w['l1_b'])\n", (8090, 8106), True, 'import tensorflow as tf\n'), ((11313, 11365), 'os.path.join', 'os.path.join', (['self.model.weight_dir', "('%s.pkl' % name)"], {}), "(self.model.weight_dir, '%s.pkl' % name)\n", (11325, 11365), False, 'import os\n'), ((7981, 8010), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (["self.w['l1_w']"], {}), "(self.w['l1_w'])\n", (7994, 8010), True, 'import tensorflow as tf\n')]
|
import kwplot
import numpy as np
import kwimage
kwplot.autompl()
f = 8
blank_key = np.zeros((64 * f, 54 * f, 3))
blank_key[:, :, :] = np.array(kwimage.Color('darkgray').as255())[None, None, :]
blank_key[0:f * 2, :] = (3, 3, 3)
blank_key[-f * 2:, :] = (3, 3, 3)
blank_key[:, 0:f * 2] = (3, 3, 3)
blank_key[:, -f * 2:] = (3, 3, 3)
key = kwimage.draw_text_on_image(blank_key.copy(), text='!\n1', halign='center', valign='center', color='white')
kwplot.imshow(key)
tab_symbol = '->'
left_rows = []
alt_text0 = [None, None, None, None, None, None, None, None]
row_text0 = ['esc', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'caps']
left_rows += [(alt_text0, row_text0)]
alt_text1 = [None, '~', '!', '@', '#', '$', '%', None]
row_text1 = ['tab', '`', '1', '2', '3', '4', '5', 'win']
left_rows += [(alt_text1, row_text1)]
alt_text2 = ['|', '?', None, None, None, None, None, None]
row_text2 = ['\\', '/', 'q', 'w', 'e', 'r', 't', 'del']
left_rows += [(alt_text2, row_text2)]
alt_text3 = [None, None, None, None, None, None, None, None]
row_text3 = ['shift', 'shift', 'a', 's', 'd', 'f', 'g', 'tab']
left_rows += [(alt_text3, row_text3)]
alt_text3 = [None, None, None, None, None, None, None, None]
row_text3 = ['ctrl', 'ctrl', 'z', 'x', 'c', 'v', 'b', 'bksp']
left_rows += [(alt_text3, row_text3)]
alt_text4 = [None, None, None, None, None, None, None, None]
row_text4 = ['alt', 'home', 'pup', 'end', 'pdwn', 'end', 'space', 'enter']
left_rows += [(alt_text4, row_text4)]
row_stack = []
for alt_text, row_text in left_rows:
row_keys = []
for t, a in zip(row_text, alt_text):
if len(t) == 1:
fontScale = 4
thickness = 6
else:
fontScale = 1
thickness = 4
if a is None:
text = t
else:
text = a + '\n\n' + t
        key = kwimage.draw_text_on_image(blank_key.copy(), text=text, halign='center', valign='center', color='white', fontScale=fontScale, thickness=thickness)
row_keys.append(key)
row = kwimage.stack_images(row_keys, axis=1, pad=1)
row_stack.append(row)
left_side = kwimage.stack_images(row_stack, axis=0, pad=1)
right_rows = []
alt_text0 = [None, None, None, None, None, None, None, None]
row_text0 = ['Prt\nScn', 'F7', 'F8', 'F9', 'F10', 'F11', 'F12', 'DEL']
right_rows += [(alt_text0, row_text0)]
alt_text1 = [None, '^', '&', '*', '(', ')', '_', '+']
row_text1 = ['win', '6', '7', '8', '9', '0', '-', '=']
right_rows += [(alt_text1, row_text1)]
alt_text2 = [None, None, None, None, None, None, '{', '}']
row_text2 = ['del', 'y', 'u', 'i', 'o', 'p', '[', ']']
right_rows += [(alt_text2, row_text2)]
alt_text3 = [None, None, None, None, None, ':', None, None]
row_text3 = ['tab', 'h', 'j', 'k', 'l', ';', 'shift', 'shift']
right_rows += [(alt_text3, row_text3)]
alt_text3 = [None, None, None, '<', '>', '"', None, None]
row_text3 = ['bksp', 'n', 'm', ',', '.', '\'', 'ctrl', 'ctrl']
right_rows += [(alt_text3, row_text3)]
alt_text4 = [None, None, None, None, None, None, None, None]
row_text4 = ['enter', 'space', '<', '^', 'V', '>', 'alt', 'alt']
right_rows += [(alt_text4, row_text4)]
row_stack = []
for alt_text, row_text in right_rows:
row_keys = []
for t, a in zip(row_text, alt_text):
if len(t) == 1:
fontScale = 4
thickness = 6
else:
fontScale = 1
thickness = 4
if a is None:
text = t
else:
text = a + '\n\n' + t
        key = kwimage.draw_text_on_image(blank_key.copy(), text=text, halign='center', valign='center', color='white', fontScale=fontScale, thickness=thickness)
row_keys.append(key)
row = kwimage.stack_images(row_keys, axis=1, pad=1)
row_stack.append(row)
right_side = kwimage.stack_images(row_stack, axis=0, pad=1)
image = kwimage.stack_images([left_side, right_side], axis=1, pad=300)
kwplot.imshow(image)
|
[
"kwplot.imshow",
"kwplot.autompl",
"numpy.zeros",
"kwimage.Color",
"kwimage.stack_images"
] |
[((48, 64), 'kwplot.autompl', 'kwplot.autompl', ([], {}), '()\n', (62, 64), False, 'import kwplot\n'), ((84, 113), 'numpy.zeros', 'np.zeros', (['(64 * f, 54 * f, 3)'], {}), '((64 * f, 54 * f, 3))\n', (92, 113), True, 'import numpy as np\n'), ((445, 463), 'kwplot.imshow', 'kwplot.imshow', (['key'], {}), '(key)\n', (458, 463), False, 'import kwplot\n'), ((2096, 2142), 'kwimage.stack_images', 'kwimage.stack_images', (['row_stack'], {'axis': '(0)', 'pad': '(1)'}), '(row_stack, axis=0, pad=1)\n', (2116, 2142), False, 'import kwimage\n'), ((3754, 3800), 'kwimage.stack_images', 'kwimage.stack_images', (['row_stack'], {'axis': '(0)', 'pad': '(1)'}), '(row_stack, axis=0, pad=1)\n', (3774, 3800), False, 'import kwimage\n'), ((3810, 3872), 'kwimage.stack_images', 'kwimage.stack_images', (['[left_side, right_side]'], {'axis': '(1)', 'pad': '(300)'}), '([left_side, right_side], axis=1, pad=300)\n', (3830, 3872), False, 'import kwimage\n'), ((3874, 3894), 'kwplot.imshow', 'kwplot.imshow', (['image'], {}), '(image)\n', (3887, 3894), False, 'import kwplot\n'), ((2011, 2056), 'kwimage.stack_images', 'kwimage.stack_images', (['row_keys'], {'axis': '(1)', 'pad': '(1)'}), '(row_keys, axis=1, pad=1)\n', (2031, 2056), False, 'import kwimage\n'), ((3668, 3713), 'kwimage.stack_images', 'kwimage.stack_images', (['row_keys'], {'axis': '(1)', 'pad': '(1)'}), '(row_keys, axis=1, pad=1)\n', (3688, 3713), False, 'import kwimage\n'), ((144, 169), 'kwimage.Color', 'kwimage.Color', (['"""darkgray"""'], {}), "('darkgray')\n", (157, 169), False, 'import kwimage\n')]
|
from __future__ import division, print_function
from glob import glob
from sklearn import svm, cross_validation
import numpy as np
import plot_utils as pu
import json
import sys
import fitsio
import pickle
import os
class Artifact(object):
def __init__(self, identifier, expname, ccd, problem, x, y):
self.ident = identifier
self.expname = expname
self.ccd = ccd
self.problem = problem
self.x = x
self.y = y
def load_release_artifacts(artifact_base):
files = glob(artifact_base+'*')
artifacts = []
for f in files:
with open(f, 'r') as fp:
arts = json.load(fp)
for art in arts:
                if not art['false_positive']:
ident = '_'.join([art['expname'].split('_')[-1],str(art['ccd'])])
oart = Artifact(ident, art['expname'], art['ccd'],\
art['problem'], art['x'], \
art['y'])
artifacts.append(oart)
artifacts.sort(key=lambda x : x.ident)
artifacts = np.array(artifacts)
return artifacts
def coarsegrain(stamp, factor=8):
nx, ny = stamp.shape
cnx = nx//factor
cny = ny//factor
cgstamp = np.ndarray((cnx, cny))
for i in np.arange(factor-1):
stamp[i+1::factor,:] += stamp[i::factor,:]
for j in np.arange(factor-1):
stamp[:,j+1::factor] += stamp[:,j::factor]
cgstamp = stamp[factor-1::factor,factor-1::factor]/float(factor**2)
return(cgstamp)
def create_design_matrix(imagenames, bkgnames, artifacts, gridsize=128, cgfactor=8, farts=0.5, save_mb=False):
assert((2048%gridsize==0) & (gridsize<=2048))
assert(len(imagenames)==len(bkgnames))
nxpixels = 4096
nypixels = 2048
nxgridelem = nxpixels//gridsize
nygridelem = nypixels//gridsize
if len(imagenames)==1:
edirs = np.array(glob(imagenames))
bdirs = np.array(glob(bkgnames))
else:
edirs = imagenames
bdirs = bkgnames
eident = []
bident = []
for i, d in enumerate(edirs):
if d=='':continue
print(bdirs[i])
eexpnum = d.split('/')[-1].split('_')
eexpnum[2] = eexpnum[2].split('.')[0]
bexpnum = bdirs[i].split('/')[-1].split('_')
assert('_'.join(eexpnum[1:3])==('_'.join(bexpnum[1:3])))
eident.append('_'.join(eexpnum[1:3]))
bident.append('_'.join(bexpnum[1:3]))
aident = np.array([a.ident for a in artifacts], dtype=str)
print(aident)
print(aident.dtype)
aident = np.sort(aident)
print(aident.dtype)
eident = np.array(eident)
bident = np.array(bident)
assert(len(eident)==len(bident))
eidx = eident.argsort()
bidx = bident.argsort()
eident = eident[eidx]
edirs = edirs[eidx]
bident = bident[bidx]
bdirs = bdirs[bidx]
features = []
labels = []
xgrid = np.arange(0,nxpixels-1,gridsize)
ygrid = np.arange(0,nypixels-1,gridsize)
print('xgrid: {0}'.format(len(xgrid)))
print('ygrid: {0}'.format(len(ygrid)))
print(eident.dtype)
print(aident.dtype)
mb = 0
aidx = np.searchsorted(aident, eident[0])
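    # aidx points at the first (sorted) artifact belonging to the first exposure;
    # it is advanced past the consumed artifacts at the end of every iteration.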
for i, e in enumerate(eident):
if save_mb:
if (len(features)%100==0) & (len(features)!=0):
features = np.array(features)
labels = np.array(labels)
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/X_{0}_{1}_{2}_{3}_mb{4}.npy'.format(len(eident), farts, gridsize, cgfactor, mb), features)
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/y_{0}_{1}_{2}_{3}_mb{4}.npy'.format(len(eident), farts, gridsize, cgfactor, mb), labels)
features = []
labels = []
mb+=1
assert(e==bident[i])
print(e)
exp = fitsio.read(edirs[i], ext=1)
msk = fitsio.read(edirs[i], ext=2)
#wgt = fitsio.read(edirs[i], ext=3) maybe use weights later
bkg = fitsio.read(bdirs[i], ext=1)
mbse = (exp-bkg)
mbse[msk>np.min(msk)] = -99
ac = []
aa = []
for j, xb in enumerate(xgrid):
for k, yb in enumerate(ygrid):
if (j>(len(xgrid)-1)) or (k>(len(ygrid)-1)): continue
stamp = coarsegrain(mbse[xb:(xb+gridsize),yb:(yb+gridsize)], cgfactor).flatten()
features.append(stamp)
tidx = aidx
problems = []
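                # walk through the sorted artifact list while its ident matches this
                # exposure, recording the artifacts that fall inside the current grid cell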
while(str(artifacts[tidx].ident)==str(e)):
a = artifacts[tidx]
if (a not in ac) and (a not in aa):
ac.append(a)
#Include artifacts near the edges of
#the grid in all pixels near that edge?
if (((xb<=a.x) & (a.x<(xb+gridsize))) &\
((yb<=a.y) & (a.y<(yb+gridsize)))):
print('Bingo!')
problems.append(a.problem)
aa.append(a)
if tidx==(len(artifacts)-1):
break
else:
tidx+=1
#for now, keep only one artifact per grid element
if len(problems)>0:
labels.append(problems[0])
else:
labels.append('no_artifacts')
if len(ac)!=len(aa):
print('Unassigned artifacts: {0}, {1}'.format(ac,aa))
print(ac[0].ident)
print(ac[0].x)
print(ac[0].y)
aidx+=(tidx-aidx)
features = np.array(features)
labels = np.array(labels)
if save_mb:
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/X_{0}_{1}_{2}_{3}_mb{4}.npy'.format(len(eident), farts, gridsize, cgfactor, mb), features)
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/y_{0}_{1}_{2}_{3}_mb{4}.npy'.format(len(eident), farts, gridsize, cgfactor, mb), labels)
return features, labels
def enumerate_labels(labels):
atype = ['Column mask', 'Cosmic ray', 'Cross-talk', 'Edge-bleed', 'Excessive mask', 'Dark rim',
'Dark halo', 'Quilted sky', 'Wavy sky', 'Anti-bleed', 'A/B jump', 'Fringing', 'Tape bump',
'Tree rings', 'Vertical jump', 'Ghost', 'Bright spray', 'Brush strokes', 'Bright arc',
'Satellite', 'Airplane', 'Guiding', 'Shutter', 'Readout', 'Haze', 'Vertical stripes',
'Other...', 'Awesome!', 'no_artifacts']
nums = range(1,len(atype)+1)
ldict = dict(zip(atype,nums))
enumlabels = np.array([ldict[l] for l in labels])
return enumlabels
def get_unrepresentative_training(runs, expids, accds, aidents, nimg=None, farts=0.5):
basedir = '/nfs/slac/g/ki/ki21/cosmo/DESDATA/OPS/red'
aimgnames = []
abkgnames = []
nimgnames = []
nbkgnames = []
idents = []
narts = len(np.unique(aidents))
if (nimg!=None) and (narts/farts<nimg):
print('Not enough artifacts to satisfy artifact fraction and nimg')
return
elif (nimg==None):
nimg = narts/farts
nnull = nimg-narts
else:
narts = nimg*farts
nnull = nimg-narts
print('nimg, nnull, narts: {0}, {1}, {2}'.format(nimg, nnull, narts))
artcount = 0
nullcount = 0
for i, (run, expid, ccd, ident) in enumerate(zip(runs,expids,accds,aidents)):
aim = '{0}/{1}/red/DECam_00{2}/DECam_00{2}_{3}.fits.fz'.format(basedir,run,expid,ccd)
im = glob('{0}/{1}/red/DECam_00{2}/DECam_00{2}_*[0-9].fits.fz'.format(basedir,run,expid))
abkg = '{0}/{1}/red/DECam_00{2}/DECam_00{2}_{3}_bkg.fits.fz'.format(basedir,run,expid,ccd)
bkg = glob('{0}/{1}/red/DECam_00{2}/DECam_00{2}_*_bkg.fits.fz'.format(basedir,run,expid))
im = [ti for ti in im if (os.path.isfile(ti) and ti!=aim)]
bkg = [tb for tb in bkg if (os.path.isfile(tb) and tb!=abkg)]
if os.path.isfile(aim) and os.path.isfile(abkg):
aimgnames.append(aim)
abkgnames.append(abkg)
idents.append(ident)
if len(im)!=len(bkg):continue
nimgnames.extend(im)
nbkgnames.extend(bkg)
aimgnames, uii = np.unique(np.array(aimgnames), return_index=True)
abkgnames = np.array(abkgnames)[uii]
aidents = np.array(idents)[uii]
nimgnames, uii = np.unique(np.array(nimgnames), return_index=True)
nbkgnames = np.array(nbkgnames)[uii]
aidx = np.random.choice(np.arange(len(aimgnames)), size=narts, replace=False)
nidx = np.random.choice(np.arange(len(nimgnames)), size=nnull, replace=False)
aimgnames = aimgnames[aidx]
abkgnames = abkgnames[aidx]
aidents = aidents[aidx]
nimgnames = nimgnames[nidx]
nbkgnames = nbkgnames[nidx]
return aimgnames, abkgnames, nimgnames, nbkgnames, aidents
def get_training_filenames(runs, expids, nimg=None):
basedir = '/nfs/slac/g/ki/ki21/cosmo/DESDATA/OPS/red'
imgnames = []
bkgnames = []
for run, expid in zip(runs,expids):
im = glob('{0}/{1}/red/DECam_00{2}/DECam_00{2}_*[0-9].fits.fz'.format(basedir,run,expid))
bk = glob('{0}/{1}/red/DECam_00{2}/DECam_00{2}_*_bkg.fits.fz'.format(basedir,run,expid))
if len(im)!=len(bk):continue
imgnames.extend(im)
bkgnames.extend(bk)
imgnames = np.unique(np.array(imgnames))
bkgnames = np.unique(np.array(bkgnames))
if nimg!=None:
idx = np.random.choice(np.arange(len(imgnames)), size=nimg, replace=False)
imgnames = imgnames[idx]
bkgnames = bkgnames[idx]
return imgnames, bkgnames
def train_and_validate(runs, expids, ccds, ident, artpath, nimg=None, farts=None, gridsize=128, cgfactor=8, store_design=False):
artifacts = load_release_artifacts(artpath)
aident = np.array([a.ident[2:] for a in artifacts], dtype=str)
ident = np.array(ident, dtype=str)
sii = np.argsort(ident)
ident = ident[sii]
runs = runs[sii]
expids = expids[sii]
ccds = ccds[sii]
aidx = np.array([np.searchsorted(ident, idn) for idn in aident])
aidx[aidx==len(ident)] = -1
aidx = aidx[ident[aidx]==aident]
print(aidx)
print(len(np.unique(aidx)))
print('Selecting training with fraction of artifacts: {0}'.format(farts))
aimgnames, abkgnames, nimgnames, nbkgnames, mident = get_unrepresentative_training(runs[aidx], expids[aidx], ccds[aidx], ident[aidx], nimg=nimg, farts=farts)
imgnames = np.hstack([aimgnames,nimgnames])
bkgnames = np.hstack([abkgnames,nbkgnames])
aii = np.argsort(aident)
aident = aident[aii]
artifacts = artifacts[aii]
aidx = np.unique(np.array([np.searchsorted(aident, m) for m in mident]))
X, y = create_design_matrix(imgnames, bkgnames, artifacts[aidx], gridsize=gridsize, cgfactor=cgfactor, farts=farts, save_mb=store_design)
ey = enumerate_labels(y)
if store_design:
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/X_{0}_{1}_{2}_{3}.npy'.format(nimg, farts, gridsize, cgfactor), X)
np.save('/nfs/slac/g/ki/ki23/des/jderose/des/inDianajonES/data/y_{0}_{1}_{2}_{3}.npy'.format(nimg, farts, gridsize, cgfactor), ey)
tre, tee, cfs, mtrain, clf = pu.diagnostic_vs_m(X, ey, nsteps=5)
edtype = np.dtype([('tre', tre.dtype), ('tee', tee.dtype), ('mtrain', mtrain.dtype)])
print('tre: {0}'.format(tre))
print('tee: {0}'.format(tee))
error_data = np.ndarray(len(tre), dtype=edtype)
error_data['tre'] = tre
error_data['tee'] = tee
error_data['mtrain'] = mtrain
fitsio.write('error_data.{0}.fits'.format(nimg), error_data)
with open('confusion.{0}.{1}.p'.format(nimg,farts), 'w') as fp:
pickle.dump(cfs, fp)
with open('clf.{0}.{1}.p'.format(nimg, farts), 'w') as fp:
pickle.dump(clf, fp)
return tre, tee, cfs, mtrain, clf
if __name__=='__main__':
artpath = sys.argv[1]
nimg = int(sys.argv[2])
fart = float(sys.argv[3])
gs = int(sys.argv[4])
cgf = int(sys.argv[5])
runinfo = np.genfromtxt('/u/ki/jderose/ki23/des/se_exposures/exp_run_info.csv', dtype=None, delimiter=',', skip_header=1)
ident = np.array(['_'.join([str(r['f5']),str(r['f4'])]) for r in runinfo])
tre, tee, cfs, mtrain, clf = train_and_validate(runinfo['f2'], runinfo['f5'], runinfo['f4'], ident, artpath, nimg=nimg, farts=fart, gridsize=gs, cgfactor=cgf, store_design=True)
|
[
"pickle.dump",
"json.load",
"plot_utils.diagnostic_vs_m",
"numpy.dtype",
"numpy.searchsorted",
"numpy.genfromtxt",
"numpy.hstack",
"numpy.argsort",
"numpy.sort",
"fitsio.read",
"os.path.isfile",
"numpy.arange",
"numpy.array",
"numpy.min",
"glob.glob",
"numpy.ndarray",
"numpy.unique"
] |
[((531, 556), 'glob.glob', 'glob', (["(artifact_base + '*')"], {}), "(artifact_base + '*')\n", (535, 556), False, 'from glob import glob\n'), ((1103, 1122), 'numpy.array', 'np.array', (['artifacts'], {}), '(artifacts)\n', (1111, 1122), True, 'import numpy as np\n'), ((1270, 1292), 'numpy.ndarray', 'np.ndarray', (['(cnx, cny)'], {}), '((cnx, cny))\n', (1280, 1292), True, 'import numpy as np\n'), ((1306, 1327), 'numpy.arange', 'np.arange', (['(factor - 1)'], {}), '(factor - 1)\n', (1315, 1327), True, 'import numpy as np\n'), ((1391, 1412), 'numpy.arange', 'np.arange', (['(factor - 1)'], {}), '(factor - 1)\n', (1400, 1412), True, 'import numpy as np\n'), ((2502, 2551), 'numpy.array', 'np.array', (['[a.ident for a in artifacts]'], {'dtype': 'str'}), '([a.ident for a in artifacts], dtype=str)\n', (2510, 2551), True, 'import numpy as np\n'), ((2607, 2622), 'numpy.sort', 'np.sort', (['aident'], {}), '(aident)\n', (2614, 2622), True, 'import numpy as np\n'), ((2660, 2676), 'numpy.array', 'np.array', (['eident'], {}), '(eident)\n', (2668, 2676), True, 'import numpy as np\n'), ((2690, 2706), 'numpy.array', 'np.array', (['bident'], {}), '(bident)\n', (2698, 2706), True, 'import numpy as np\n'), ((2961, 2997), 'numpy.arange', 'np.arange', (['(0)', '(nxpixels - 1)', 'gridsize'], {}), '(0, nxpixels - 1, gridsize)\n', (2970, 2997), True, 'import numpy as np\n'), ((3006, 3042), 'numpy.arange', 'np.arange', (['(0)', '(nypixels - 1)', 'gridsize'], {}), '(0, nypixels - 1, gridsize)\n', (3015, 3042), True, 'import numpy as np\n'), ((3195, 3229), 'numpy.searchsorted', 'np.searchsorted', (['aident', 'eident[0]'], {}), '(aident, eident[0])\n', (3210, 3229), True, 'import numpy as np\n'), ((5702, 5720), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (5710, 5720), True, 'import numpy as np\n'), ((5734, 5750), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (5742, 5750), True, 'import numpy as np\n'), ((6678, 6714), 'numpy.array', 'np.array', (['[ldict[l] for l in labels]'], {}), '([ldict[l] for l in labels])\n', (6686, 6714), True, 'import numpy as np\n'), ((9891, 9944), 'numpy.array', 'np.array', (['[a.ident[2:] for a in artifacts]'], {'dtype': 'str'}), '([a.ident[2:] for a in artifacts], dtype=str)\n', (9899, 9944), True, 'import numpy as np\n'), ((9957, 9983), 'numpy.array', 'np.array', (['ident'], {'dtype': 'str'}), '(ident, dtype=str)\n', (9965, 9983), True, 'import numpy as np\n'), ((9994, 10011), 'numpy.argsort', 'np.argsort', (['ident'], {}), '(ident)\n', (10004, 10011), True, 'import numpy as np\n'), ((10548, 10581), 'numpy.hstack', 'np.hstack', (['[aimgnames, nimgnames]'], {}), '([aimgnames, nimgnames])\n', (10557, 10581), True, 'import numpy as np\n'), ((10596, 10629), 'numpy.hstack', 'np.hstack', (['[abkgnames, nbkgnames]'], {}), '([abkgnames, nbkgnames])\n', (10605, 10629), True, 'import numpy as np\n'), ((10640, 10658), 'numpy.argsort', 'np.argsort', (['aident'], {}), '(aident)\n', (10650, 10658), True, 'import numpy as np\n'), ((11301, 11336), 'plot_utils.diagnostic_vs_m', 'pu.diagnostic_vs_m', (['X', 'ey'], {'nsteps': '(5)'}), '(X, ey, nsteps=5)\n', (11319, 11336), True, 'import plot_utils as pu\n'), ((11351, 11427), 'numpy.dtype', 'np.dtype', (["[('tre', tre.dtype), ('tee', tee.dtype), ('mtrain', mtrain.dtype)]"], {}), "([('tre', tre.dtype), ('tee', tee.dtype), ('mtrain', mtrain.dtype)])\n", (11359, 11427), True, 'import numpy as np\n'), ((12117, 12233), 'numpy.genfromtxt', 'np.genfromtxt', (['"""/u/ki/jderose/ki23/des/se_exposures/exp_run_info.csv"""'], {'dtype': 
'None', 'delimiter': '""","""', 'skip_header': '(1)'}), "('/u/ki/jderose/ki23/des/se_exposures/exp_run_info.csv', dtype\n =None, delimiter=',', skip_header=1)\n", (12130, 12233), True, 'import numpy as np\n'), ((3929, 3957), 'fitsio.read', 'fitsio.read', (['edirs[i]'], {'ext': '(1)'}), '(edirs[i], ext=1)\n', (3940, 3957), False, 'import fitsio\n'), ((3972, 4000), 'fitsio.read', 'fitsio.read', (['edirs[i]'], {'ext': '(2)'}), '(edirs[i], ext=2)\n', (3983, 4000), False, 'import fitsio\n'), ((4083, 4111), 'fitsio.read', 'fitsio.read', (['bdirs[i]'], {'ext': '(1)'}), '(bdirs[i], ext=1)\n', (4094, 4111), False, 'import fitsio\n'), ((6998, 7016), 'numpy.unique', 'np.unique', (['aidents'], {}), '(aidents)\n', (7007, 7016), True, 'import numpy as np\n'), ((8305, 8324), 'numpy.array', 'np.array', (['aimgnames'], {}), '(aimgnames)\n', (8313, 8324), True, 'import numpy as np\n'), ((8361, 8380), 'numpy.array', 'np.array', (['abkgnames'], {}), '(abkgnames)\n', (8369, 8380), True, 'import numpy as np\n'), ((8400, 8416), 'numpy.array', 'np.array', (['idents'], {}), '(idents)\n', (8408, 8416), True, 'import numpy as np\n'), ((8454, 8473), 'numpy.array', 'np.array', (['nimgnames'], {}), '(nimgnames)\n', (8462, 8473), True, 'import numpy as np\n'), ((8511, 8530), 'numpy.array', 'np.array', (['nbkgnames'], {}), '(nbkgnames)\n', (8519, 8530), True, 'import numpy as np\n'), ((9430, 9448), 'numpy.array', 'np.array', (['imgnames'], {}), '(imgnames)\n', (9438, 9448), True, 'import numpy as np\n'), ((9475, 9493), 'numpy.array', 'np.array', (['bkgnames'], {}), '(bkgnames)\n', (9483, 9493), True, 'import numpy as np\n'), ((11780, 11800), 'pickle.dump', 'pickle.dump', (['cfs', 'fp'], {}), '(cfs, fp)\n', (11791, 11800), False, 'import pickle\n'), ((11872, 11892), 'pickle.dump', 'pickle.dump', (['clf', 'fp'], {}), '(clf, fp)\n', (11883, 11892), False, 'import pickle\n'), ((646, 659), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (655, 659), False, 'import json\n'), ((1944, 1960), 'glob.glob', 'glob', (['imagenames'], {}), '(imagenames)\n', (1948, 1960), False, 'from glob import glob\n'), ((1987, 2001), 'glob.glob', 'glob', (['bkgnames'], {}), '(bkgnames)\n', (1991, 2001), False, 'from glob import glob\n'), ((8027, 8046), 'os.path.isfile', 'os.path.isfile', (['aim'], {}), '(aim)\n', (8041, 8046), False, 'import os\n'), ((8051, 8071), 'os.path.isfile', 'os.path.isfile', (['abkg'], {}), '(abkg)\n', (8065, 8071), False, 'import os\n'), ((10123, 10150), 'numpy.searchsorted', 'np.searchsorted', (['ident', 'idn'], {}), '(ident, idn)\n', (10138, 10150), True, 'import numpy as np\n'), ((10270, 10285), 'numpy.unique', 'np.unique', (['aidx'], {}), '(aidx)\n', (10279, 10285), True, 'import numpy as np\n'), ((3372, 3390), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (3380, 3390), True, 'import numpy as np\n'), ((3416, 3432), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (3424, 3432), True, 'import numpy as np\n'), ((4155, 4166), 'numpy.min', 'np.min', (['msk'], {}), '(msk)\n', (4161, 4166), True, 'import numpy as np\n'), ((10746, 10772), 'numpy.searchsorted', 'np.searchsorted', (['aident', 'm'], {}), '(aident, m)\n', (10761, 10772), True, 'import numpy as np\n'), ((7912, 7930), 'os.path.isfile', 'os.path.isfile', (['ti'], {}), '(ti)\n', (7926, 7930), False, 'import os\n'), ((7981, 7999), 'os.path.isfile', 'os.path.isfile', (['tb'], {}), '(tb)\n', (7995, 7999), False, 'import os\n')]
|
from math import log
import operator
import numpy as np
"""
从文件读入数据,返回一个二维数组
参数:filename 文件路径
"""
def data_input(filename):
r_train = open(filename,'r')
data = r_train.readlines()
data_x = []
for line in data:
line = line.strip('\n').split(' ')
data_x.append(line)
return data_x
"""
计算数据集的熵值,通过统计标签的个数和种类或者每种标签的比例
参数 dataSet 数据集
"""
def calcShannonEnt(dataSet):
    # number of rows in the data set
    numEntries=len(dataSet)
    # dictionary holding the number of occurrences of each label
    labelCounts={}
    # count the labels of all feature vectors
    for featVec in dataSet:
        currentLabel=featVec[-1]
        # the label is the last element of the feature vector
        if currentLabel not in labelCounts.keys():  # add the label to the dictionary if it is not there yet
            labelCounts[currentLabel]=0
        labelCounts[currentLabel]+=1  # increment the label count
    shannonEnt=0.0  # empirical entropy
    # compute the empirical entropy
    for key in labelCounts:
        prob=float(labelCounts[key])/numEntries  # probability of picking this label
        shannonEnt-=prob*log(prob,2)  # entropy formula: H = -sum(p * log2(p))
    return shannonEnt  # return the empirical entropy
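# For example, a data set whose labels are ['yes', 'yes', 'no'] has entropy
# -(2/3)*log2(2/3) - (1/3)*log2(1/3) ~= 0.918 bits.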
"""
从原数据集获得子数据集,不包括axis对应的数据
参数 dataSet 原始数据集
axis 划分属性
value 划分属性的划分值
"""
def splitDataSet(dataSet,axis,value):
    # list holding the returned subset
    retDataSet=[]
    # loop over the data set
    for featVec in dataSet:
        if featVec[axis]==value:  # keep the sample and drop the axis-th attribute
            reduceFeatVec=featVec[:axis]
            # append the matching (reduced) sample to the subset
            reduceFeatVec.extend(featVec[axis+1:])
            retDataSet.append(reduceFeatVec)
return retDataSet
"""
选出信息增益和信息增益率最大的属性进行数据集的划分
参数 dataSet 数据集
"""
def chooseBestFeatureToSplit(dataSet):
    # per-attribute information gain and gain ratio
    info_a = []
    info_a_rate = []
    # number of attributes (the last column is the class label)
    numFeatures = len(dataSet[0]) - 1
    # Shannon entropy of the whole data set
    baseEntropy = calcShannonEnt(dataSet)
    # best information gain ratio found so far
    bestInfoGain_rate = 0.0
    # index of the best attribute
    bestFeature = -1
    # loop over all attributes
    for i in range(numFeatures):
        # collect the i-th attribute value of every sample
        featList = [example[i] for example in dataSet]
        # unique values of this attribute
        uniqueVals = set(featList)
        # empirical conditional entropy
        newEntropy = 0.0
        # split information (intrinsic value) of the attribute
        ha = 0
        # compute the conditional entropy and the split information
        for value in uniqueVals:
            # subDataSet: subset obtained by splitting on this value
            subDataSet = splitDataSet(dataSet, i, value)
            # proportion of samples that fall into this subset
            prob = len(subDataSet) / float(len(dataSet))
            # accumulate the empirical conditional entropy
            newEntropy += prob * calcShannonEnt(subDataSet)
            if prob == 1:
                # single attribute value: use 1 to avoid a zero split information
                ha = 1
            else:
                ha -= prob*log(prob,2)
        # information gain of attribute i
        infoGain = baseEntropy - newEntropy
        # information gain ratio of attribute i
        infoGain_rate = infoGain / ha
        info_a.append(infoGain)
        info_a_rate.append(infoGain_rate)
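    # C4.5-style selection: among the attributes whose information gain is above
    # the average gain, pick the one with the largest gain ratio.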
m = np.mean(info_a)
for i in range(numFeatures):
if info_a[i] > m:
if info_a_rate[i] > bestInfoGain_rate:
bestInfoGain_rate = info_a_rate[i]
bestFeature = i
return bestFeature
"""
统计当前标签集中元素个数最多的标签
参数:classList当前标签集
"""
def majorityCnt(classList):
classCount={}
    # count how often each label occurs in classList
    for vote in classList:
        if vote not in classCount.keys():
            classCount[vote]=0
        classCount[vote]+=1
    # sort the labels by their counts in descending order
sortedClassCount=sorted(classCount.items(),key=operator.itemgetter(1),reverse=True)
return sortedClassCount[0][0]
"""
建立决策树
参数 dataSet 训练数据集
labels 训练数据集属性的标签
featLabels 储存决策树的非叶子节点
"""
def createTree(dataSet,labels,featLabels):
    # extract the class labels (the last column of every sample)
    classList=[example[-1] for example in dataSet]
    # if all samples share the same class, stop splitting
    if classList.count(classList[0])==len(classList):
        return classList[0]
    # if all attributes have been used, return the most frequent class label
    if len(dataSet[0])==1:
        return majorityCnt(classList)
    # choose the best attribute to split on
    bestFeat=chooseBestFeatureToSplit(dataSet)
    # name of the best attribute
    bestFeatLabel=labels[bestFeat]
    featLabels.append(bestFeatLabel)
    # grow the tree from the best attribute
    myTree={bestFeatLabel:{}}
    # remove the attribute that has just been used (work on a copy of labels)
    t_label = labels[:]
    del(t_label[bestFeat])
    # all values of the best attribute found in the training set
    featValues=[example[bestFeat] for example in dataSet]
    # drop duplicate values
    uniqueVls=set(featValues)
    # build one branch per attribute value
    for value in uniqueVls:
        # recurse on the corresponding subset (values never seen in the data produce no branch)
myTree[bestFeatLabel][value]=createTree(splitDataSet(dataSet,bestFeat,value), t_label,featLabels)
return myTree
"""
根据决策树对训练数据进行预测
参数 inputTree 决策树
labels 属性的标签集,来获得当前决策树节点对应的属性
testVec 测试数据
"""
def classify(inputTree,labels,testVec):
    # attribute tested at the root of the (sub)tree
    firstStr=next(iter(inputTree))  # node label
    # dictionary of the branches below it
    secondDict=inputTree[firstStr]  # subtrees
    featIndex=labels.index(firstStr)  # index of the tested attribute
    classLabel=[]
    for key in secondDict.keys():
        if testVec[featIndex]==key:  # follow the branch matching the sample's value
            if type(secondDict[key]).__name__=='dict':  # the branch is still a subtree
classLabel=classify(secondDict[key],labels,testVec)
else:
classLabel=secondDict[key]
return classLabel
if __name__=='__main__':
file_name = r"C:\Users\liuji\Desktop\third\知识分析\决策树\决策树_data\golf_train.Z7nJ667n.txt"
file_tname = r"C:\Users\liuji\Desktop\third\知识分析\决策树\决策树_data\golf_test.6meJ6p3T.txt"
labels = ['a', 'b', 'c', 'd']
dataSet = data_input(file_name)
featLabels=[]
myTree=createTree(dataSet,labels,featLabels)
    # test data
data_test = data_input(file_tname)
print(myTree)
ans = []
for i in range(len(data_test)):
ans.append(classify(myTree,labels,data_test[i]))
print(ans[i])
|
[
"math.log",
"numpy.mean",
"operator.itemgetter"
] |
[((2629, 2644), 'numpy.mean', 'np.mean', (['info_a'], {}), '(info_a)\n', (2636, 2644), True, 'import numpy as np\n'), ((917, 929), 'math.log', 'log', (['prob', '(2)'], {}), '(prob, 2)\n', (920, 929), False, 'from math import log\n'), ((3192, 3214), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (3211, 3214), False, 'import operator\n'), ((2402, 2414), 'math.log', 'log', (['prob', '(2)'], {}), '(prob, 2)\n', (2405, 2414), False, 'from math import log\n')]
|
# %%
# Imports
from BoundaryConditions.Simulation.SimulationData import getSimData
from GenericModel.Design import _addAgents, _loadBuildingData
from SystemComponentsFast import simulate, Building, Cell
import pandas as pd
import numpy as np
from PostProcesing import plots
import logging
# %%
# Configure logging
FORMAT = ("%(levelname)s %(name)s %(asctime)-15s "
"%(filename)s:%(lineno)d %(message)s")
logging.basicConfig(format=FORMAT)
logging.getLogger().setLevel(logging.WARNING)
# %%
# Parameters & Data
# simulation time
start = '01.01.2020'
end = '01.01.2021'
# environment
region = "East"
# generate data using helper functions
nSteps, time, SLP, HWP, Weather, Solar = getSimData(start, end, region)
# norm parameters (irradiance, temperature)
climate = pd.read_hdf("./BoundaryConditions/Weather/" + region +
".h5", 'Standard')
# create cell
cell = Cell(climate.loc['EgNorm [kWh/m^2]', 'Value'],
climate.loc['ToutNorm [degC]', 'Value'],
nSteps)
# building parameters
bType = "FSH"
bClass = "class_3"
mState = "original" # "modernised"
airState = "VentilationFree" # "VentilationMech"
isAtDHN = False
# get geometry
Geo, U, g, n = _loadBuildingData(bType)
# reformat data
a_uv_values = np.array([Geo.loc['Areas'].values.T[0],
U.loc['UValues', (bClass, mState)]
]).T
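# pair each area with its U-value: the columns of a_uv_values are [area, U-value]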
if bClass == 'class_5':
infState = 'new'
else:
infState = mState
# create building
building = Building(Geo.loc['nUnits'].values.astype(np.uint32)[0][0],
Geo.loc[('A_living', ''), 'Value'], a_uv_values,
U.loc['DeltaU', (bClass, mState)],
n.loc['Infiltration', infState],
n.loc[airState, infState],
(Geo.loc['cp_effective'] * Geo.loc['Volume']).Value,
g.loc[mState, bClass],
Geo.loc[('Volume')].values.astype(np.uint32)[0][0],
isAtDHN, cell.t_out_n, nSteps
)
# create and add agents
_addAgents(building, 1., 1., 0.)
# add dimensioned chp
building.add_dimensioned_chp(nSteps)
# add building to cell
cell.add_building(building)
# %%
# Run simulation
simulate(cell, nSteps, SLP.to_dict('list'), HWP, Weather.to_dict('list'),
Solar.to_dict('list'))
# %%
# Plot cell power balance
plots.cellPowerBalance(cell, time)
# %%
# Plot cell energy balance
plots.cellEnergyBalance(cell, time)
# %%
# Plot building temperature
plots.buildingTemperature(cell.buildings[0], time, Weather['T [degC]'])
# %%
# Plot chp production
title = "Thermal output of chp system"
gen_t = (np.array(cell.buildings[0].get_chp_system().chp.gen_t.get_memory()) +
np.array(cell.buildings[0].get_chp_system().boiler.gen_t.
get_memory())) \
/ 1000.
load_t = np.array(cell.buildings[0].load_t.get_memory()) / 1000.
unitPrefix = "K"
plots.arbitraryBalance(gen_t, load_t, time, unitPrefix, title)
# %%
# Plot storage charging state
storage = cell.buildings[0].get_chp_system().storage
plots.chargeState(storage, time)
# %%
|
[
"BoundaryConditions.Simulation.SimulationData.getSimData",
"pandas.read_hdf",
"logging.basicConfig",
"GenericModel.Design._addAgents",
"PostProcesing.plots.cellEnergyBalance",
"GenericModel.Design._loadBuildingData",
"PostProcesing.plots.buildingTemperature",
"PostProcesing.plots.chargeState",
"numpy.array",
"PostProcesing.plots.cellPowerBalance",
"PostProcesing.plots.arbitraryBalance",
"SystemComponentsFast.Cell",
"logging.getLogger"
] |
[((415, 449), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'FORMAT'}), '(format=FORMAT)\n', (434, 449), False, 'import logging\n'), ((690, 720), 'BoundaryConditions.Simulation.SimulationData.getSimData', 'getSimData', (['start', 'end', 'region'], {}), '(start, end, region)\n', (700, 720), False, 'from BoundaryConditions.Simulation.SimulationData import getSimData\n'), ((775, 848), 'pandas.read_hdf', 'pd.read_hdf', (["('./BoundaryConditions/Weather/' + region + '.h5')", '"""Standard"""'], {}), "('./BoundaryConditions/Weather/' + region + '.h5', 'Standard')\n", (786, 848), True, 'import pandas as pd\n'), ((892, 992), 'SystemComponentsFast.Cell', 'Cell', (["climate.loc['EgNorm [kWh/m^2]', 'Value']", "climate.loc['ToutNorm [degC]', 'Value']", 'nSteps'], {}), "(climate.loc['EgNorm [kWh/m^2]', 'Value'], climate.loc[\n 'ToutNorm [degC]', 'Value'], nSteps)\n", (896, 992), False, 'from SystemComponentsFast import simulate, Building, Cell\n'), ((1200, 1224), 'GenericModel.Design._loadBuildingData', '_loadBuildingData', (['bType'], {}), '(bType)\n', (1217, 1224), False, 'from GenericModel.Design import _addAgents, _loadBuildingData\n'), ((2052, 2087), 'GenericModel.Design._addAgents', '_addAgents', (['building', '(1.0)', '(1.0)', '(0.0)'], {}), '(building, 1.0, 1.0, 0.0)\n', (2062, 2087), False, 'from GenericModel.Design import _addAgents, _loadBuildingData\n'), ((2356, 2390), 'PostProcesing.plots.cellPowerBalance', 'plots.cellPowerBalance', (['cell', 'time'], {}), '(cell, time)\n', (2378, 2390), False, 'from PostProcesing import plots\n'), ((2424, 2459), 'PostProcesing.plots.cellEnergyBalance', 'plots.cellEnergyBalance', (['cell', 'time'], {}), '(cell, time)\n', (2447, 2459), False, 'from PostProcesing import plots\n'), ((2494, 2565), 'PostProcesing.plots.buildingTemperature', 'plots.buildingTemperature', (['cell.buildings[0]', 'time', "Weather['T [degC]']"], {}), "(cell.buildings[0], time, Weather['T [degC]'])\n", (2519, 2565), False, 'from PostProcesing import plots\n'), ((2913, 2975), 'PostProcesing.plots.arbitraryBalance', 'plots.arbitraryBalance', (['gen_t', 'load_t', 'time', 'unitPrefix', 'title'], {}), '(gen_t, load_t, time, unitPrefix, title)\n', (2935, 2975), False, 'from PostProcesing import plots\n'), ((3065, 3097), 'PostProcesing.plots.chargeState', 'plots.chargeState', (['storage', 'time'], {}), '(storage, time)\n', (3082, 3097), False, 'from PostProcesing import plots\n'), ((1255, 1331), 'numpy.array', 'np.array', (["[Geo.loc['Areas'].values.T[0], U.loc['UValues', (bClass, mState)]]"], {}), "([Geo.loc['Areas'].values.T[0], U.loc['UValues', (bClass, mState)]])\n", (1263, 1331), True, 'import numpy as np\n'), ((450, 469), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (467, 469), False, 'import logging\n')]
|
import json
import numpy as np
from tqdm import tqdm
from pathlib import Path, PosixPath
from multiprocessing import Pool
from librosa.feature.spectral import _spectrogram
from librosa.feature import tempogram, fourier_tempogram, melspectrogram, tonnetz
from librosa.feature import mfcc, chroma_stft, chroma_cqt, chroma_cens, poly_features
from tools.feature_extractor import SignalLoader, extract_spec_features
def calc_fbank(y: np.ndarray,
frame_len: int = 800,
shift: int = 400,
nfft: int = 512,
nfilt: int = 42,
sr: int = 16000,
pre_emphasis: float = 0.97,
normalize: bool = True,
eps: float = 1e-8):
"""
Calculates Filter banks from the input signal
Parameters:
y: np.ndarray - input signal
        frame_len: int - length of each frame in samples
        shift: int - shift between consecutive frames in samples
        nfft: int - number of DFT points
        nfilt: int - number of mel filters
        sr: int - sample rate of the input signal
pre_emphasis: float - preprocessing constant
normalize: bool - normalize fbank or not
eps: float - epsilon constant
Returns:
filter_banks: np.ndarray - filter banks
"""
emph_signal = np.append(y[0], y[1:] - pre_emphasis * y[:-1])
signal_len = len(emph_signal)
num_frames = int(np.ceil(np.abs(signal_len - frame_len) / shift))
pad_signal_len = num_frames * shift + frame_len
z = np.zeros((pad_signal_len - signal_len))
# Pad Signal to make sure that all frames have equal number
# of samples without truncating any samples from the original signal
pad_signal = np.append(emph_signal, z)
indices = np.tile(np.arange(0, frame_len), (num_frames, 1)) +\
np.tile(np.arange(0, num_frames * shift, shift), (frame_len, 1)).T
frames = pad_signal[indices.astype(np.int32, copy=False)]
# Hamming window
frames *= np.hamming(frame_len)
mag_frames = np.absolute(np.fft.rfft(frames, nfft))
pow_frames = (mag_frames)**2 / nfft
low_freq_mel = 0
high_freq_mel = (2595 * np.log10(1 + (sr / 2) / 700))
mel_points = np.linspace(low_freq_mel, high_freq_mel, nfilt + 2)
hz_points = (700 * (10**(mel_points / 2595) - 1))
bin = np.floor((nfft + 1) * hz_points / sr)
fbank = np.zeros((nfilt, int(np.floor(nfft / 2 + 1))))
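    # Build the triangular mel filters: filter m rises linearly from bin[m-1] to
    # its peak at bin[m] and falls back to zero at bin[m+1].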
for m in range(1, nfilt + 1):
f_m_minus = int(bin[m - 1]) # left
f_m = int(bin[m]) # center
f_m_plus = int(bin[m + 1]) # right
for k in range(f_m_minus, f_m):
fbank[m - 1, k] = (k - bin[m - 1]) / (bin[m] - bin[m - 1])
for k in range(f_m, f_m_plus):
fbank[m - 1, k] = (bin[m + 1] - k) / (bin[m + 1] - bin[m])
filter_banks = np.dot(pow_frames, fbank.T)
# numerical stability
filter_banks = np.where(filter_banks == 0, np.finfo(float).eps, filter_banks)
filter_banks = 20 * np.log10(filter_banks) # dB
if normalize:
filter_banks -= (np.mean(filter_banks, axis=0) + eps)
return filter_banks
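# Illustrative sketch only (not part of the original pipeline): how calc_fbank
# might be called on one second of 16 kHz audio. The random signal and the helper
# name `_fbank_demo` are assumptions made for demonstration; the function is
# defined but never called, so module behaviour is unchanged.
def _fbank_demo():
    demo_signal = np.random.randn(16000)          # hypothetical 1 s mono signal
    fb = calc_fbank(demo_signal, frame_len=800, shift=400,
                    nfft=512, nfilt=42, sr=16000)
    return fb.shape                                 # (num_frames, nfilt)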
SAMPLE_RATE = 16000
feature_functions = {
'spec': SignalLoader(_spectrogram, get_first=True),
'mfcc': SignalLoader(mfcc, abs_val=True, **{'sr': SAMPLE_RATE}),
'chroma_stft': SignalLoader(chroma_stft, **{'sr': SAMPLE_RATE}),
'chroma_cqt': SignalLoader(chroma_cqt, **{'sr': SAMPLE_RATE}),
'chroma_cens': SignalLoader(chroma_cens, **{'sr': SAMPLE_RATE}),
'mel_spec': SignalLoader(melspectrogram, **{'sr': SAMPLE_RATE}),
'tonnetz': SignalLoader(tonnetz, abs_val=True, **{'sr': SAMPLE_RATE}),
'poly': SignalLoader(poly_features, abs_val=True, **{'sr': SAMPLE_RATE, 'order': 5}),
'tempogram': SignalLoader(tempogram, abs_val=True, **{'sr': SAMPLE_RATE}),
'fourier_tempogram': SignalLoader(fourier_tempogram, abs_val=True, **{'sr': SAMPLE_RATE}),
'fbank': SignalLoader(calc_fbank, abs_val=True, **{'sr': SAMPLE_RATE})
}
def calc_spec_statistics(paths: list,
feature_name: str,
feature_config: dict,
feature_type: str,
save_path: PosixPath = None,
save_feature_names: bool = False):
"""
Calculates stat. features of the signal spectrum
Parameters:
        paths: list - paths to the audio files
feature_name: str - name of the processed spectral feature
feature_config: dict - configuration of processed spectral feature
feature_type: str - type of the processed feature (train, dev or val)
save_path: PosixPath - path for saving of calculated features
save_feature_names: bool - save names of calculated features or not
Returns:
statistics: np.ndarray - stat. features extracted from the spectrum of the signal
"""
assert feature_name in feature_functions.keys()
assert feature_type in ('train', 'dev', 'val')
statistics = []
stat_func = feature_functions[feature_name]
with tqdm(total=len(paths)) as pbar:
for spec in map(stat_func, paths):
features, names = extract_spec_features(spec, feature_config, prefix=feature_name)
statistics.append(features)
pbar.update()
if save_path is not None and save_feature_names:
(save_path / 'feature_names').mkdir(exist_ok=True, parents=True)
np.save(save_path / 'feature_names' / f'{feature_name}_names', names)
statistics = np.array(statistics, dtype=np.float32)
if save_path is not None:
(save_path / feature_type).mkdir(exist_ok=True, parents=True)
np.save(save_path / f'{feature_type}/{feature_name}', statistics)
return statistics
if __name__ == '__main__':
root_dir = Path(__file__).parent
file_path = root_dir / 'tests' / 'LA_T_1000137.flac'
config_path = root_dir / 'configs'
with open(config_path / 'spectral_features.json', 'r') as config:
spectral_features_config = json.load(config)
paths = [file_path]
feature_type = 'train'
for feature_name, feature_config in spectral_features_config.items():
statistics = calc_spec_statistics(paths=paths,
feature_name=feature_name,
feature_config=feature_config,
feature_type=feature_type)
statistics_test = np.load(root_dir / 'tests' / 'spectral_features' / f'LA_T_1000137_{feature_name}.npy')
assert np.all(statistics == statistics_test), f'Test for {feature_name} not passed'
print(feature_name, statistics.shape)
print('OK')
|
[
"numpy.load",
"numpy.fft.rfft",
"numpy.abs",
"numpy.floor",
"pathlib.Path",
"numpy.mean",
"numpy.arange",
"tools.feature_extractor.extract_spec_features",
"tools.feature_extractor.SignalLoader",
"numpy.append",
"numpy.finfo",
"numpy.linspace",
"numpy.log10",
"numpy.save",
"numpy.hamming",
"numpy.dot",
"numpy.all",
"json.load",
"numpy.zeros",
"numpy.array"
] |
[((1262, 1308), 'numpy.append', 'np.append', (['y[0]', '(y[1:] - pre_emphasis * y[:-1])'], {}), '(y[0], y[1:] - pre_emphasis * y[:-1])\n', (1271, 1308), True, 'import numpy as np\n'), ((1478, 1515), 'numpy.zeros', 'np.zeros', (['(pad_signal_len - signal_len)'], {}), '(pad_signal_len - signal_len)\n', (1486, 1515), True, 'import numpy as np\n'), ((1678, 1703), 'numpy.append', 'np.append', (['emph_signal', 'z'], {}), '(emph_signal, z)\n', (1687, 1703), True, 'import numpy as np\n'), ((1951, 1972), 'numpy.hamming', 'np.hamming', (['frame_len'], {}), '(frame_len)\n', (1961, 1972), True, 'import numpy as np\n'), ((2166, 2217), 'numpy.linspace', 'np.linspace', (['low_freq_mel', 'high_freq_mel', '(nfilt + 2)'], {}), '(low_freq_mel, high_freq_mel, nfilt + 2)\n', (2177, 2217), True, 'import numpy as np\n'), ((2282, 2319), 'numpy.floor', 'np.floor', (['((nfft + 1) * hz_points / sr)'], {}), '((nfft + 1) * hz_points / sr)\n', (2290, 2319), True, 'import numpy as np\n'), ((2806, 2833), 'numpy.dot', 'np.dot', (['pow_frames', 'fbank.T'], {}), '(pow_frames, fbank.T)\n', (2812, 2833), True, 'import numpy as np\n'), ((3161, 3203), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['_spectrogram'], {'get_first': '(True)'}), '(_spectrogram, get_first=True)\n', (3173, 3203), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3217, 3272), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['mfcc'], {'abs_val': '(True)'}), "(mfcc, abs_val=True, **{'sr': SAMPLE_RATE})\n", (3229, 3272), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3293, 3341), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['chroma_stft'], {}), "(chroma_stft, **{'sr': SAMPLE_RATE})\n", (3305, 3341), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3361, 3408), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['chroma_cqt'], {}), "(chroma_cqt, **{'sr': SAMPLE_RATE})\n", (3373, 3408), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3429, 3477), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['chroma_cens'], {}), "(chroma_cens, **{'sr': SAMPLE_RATE})\n", (3441, 3477), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3495, 3546), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['melspectrogram'], {}), "(melspectrogram, **{'sr': SAMPLE_RATE})\n", (3507, 3546), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3563, 3621), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['tonnetz'], {'abs_val': '(True)'}), "(tonnetz, abs_val=True, **{'sr': SAMPLE_RATE})\n", (3575, 3621), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3635, 3711), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['poly_features'], {'abs_val': '(True)'}), "(poly_features, abs_val=True, **{'sr': SAMPLE_RATE, 'order': 5})\n", (3647, 3711), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3730, 3790), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['tempogram'], {'abs_val': '(True)'}), "(tempogram, abs_val=True, **{'sr': SAMPLE_RATE})\n", (3742, 3790), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3817, 3885), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['fourier_tempogram'], {'abs_val': '(True)'}), "(fourier_tempogram, abs_val=True, **{'sr': SAMPLE_RATE})\n", 
(3829, 3885), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((3900, 3961), 'tools.feature_extractor.SignalLoader', 'SignalLoader', (['calc_fbank'], {'abs_val': '(True)'}), "(calc_fbank, abs_val=True, **{'sr': SAMPLE_RATE})\n", (3912, 3961), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((5488, 5526), 'numpy.array', 'np.array', (['statistics'], {'dtype': 'np.float32'}), '(statistics, dtype=np.float32)\n', (5496, 5526), True, 'import numpy as np\n'), ((2002, 2027), 'numpy.fft.rfft', 'np.fft.rfft', (['frames', 'nfft'], {}), '(frames, nfft)\n', (2013, 2027), True, 'import numpy as np\n'), ((2119, 2145), 'numpy.log10', 'np.log10', (['(1 + sr / 2 / 700)'], {}), '(1 + sr / 2 / 700)\n', (2127, 2145), True, 'import numpy as np\n'), ((2966, 2988), 'numpy.log10', 'np.log10', (['filter_banks'], {}), '(filter_banks)\n', (2974, 2988), True, 'import numpy as np\n'), ((5400, 5469), 'numpy.save', 'np.save', (["(save_path / 'feature_names' / f'{feature_name}_names')", 'names'], {}), "(save_path / 'feature_names' / f'{feature_name}_names', names)\n", (5407, 5469), True, 'import numpy as np\n'), ((5635, 5700), 'numpy.save', 'np.save', (["(save_path / f'{feature_type}/{feature_name}')", 'statistics'], {}), "(save_path / f'{feature_type}/{feature_name}', statistics)\n", (5642, 5700), True, 'import numpy as np\n'), ((5768, 5782), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (5772, 5782), False, 'from pathlib import Path, PosixPath\n'), ((5991, 6008), 'json.load', 'json.load', (['config'], {}), '(config)\n', (6000, 6008), False, 'import json\n'), ((6427, 6517), 'numpy.load', 'np.load', (["(root_dir / 'tests' / 'spectral_features' / f'LA_T_1000137_{feature_name}.npy')"], {}), "(root_dir / 'tests' / 'spectral_features' /\n f'LA_T_1000137_{feature_name}.npy')\n", (6434, 6517), True, 'import numpy as np\n'), ((6529, 6566), 'numpy.all', 'np.all', (['(statistics == statistics_test)'], {}), '(statistics == statistics_test)\n', (6535, 6566), True, 'import numpy as np\n'), ((1727, 1750), 'numpy.arange', 'np.arange', (['(0)', 'frame_len'], {}), '(0, frame_len)\n', (1736, 1750), True, 'import numpy as np\n'), ((2907, 2922), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (2915, 2922), True, 'import numpy as np\n'), ((3039, 3068), 'numpy.mean', 'np.mean', (['filter_banks'], {'axis': '(0)'}), '(filter_banks, axis=0)\n', (3046, 3068), True, 'import numpy as np\n'), ((5126, 5190), 'tools.feature_extractor.extract_spec_features', 'extract_spec_features', (['spec', 'feature_config'], {'prefix': 'feature_name'}), '(spec, feature_config, prefix=feature_name)\n', (5147, 5190), False, 'from tools.feature_extractor import SignalLoader, extract_spec_features\n'), ((1377, 1407), 'numpy.abs', 'np.abs', (['(signal_len - frame_len)'], {}), '(signal_len - frame_len)\n', (1383, 1407), True, 'import numpy as np\n'), ((1794, 1833), 'numpy.arange', 'np.arange', (['(0)', '(num_frames * shift)', 'shift'], {}), '(0, num_frames * shift, shift)\n', (1803, 1833), True, 'import numpy as np\n'), ((2354, 2376), 'numpy.floor', 'np.floor', (['(nfft / 2 + 1)'], {}), '(nfft / 2 + 1)\n', (2362, 2376), True, 'import numpy as np\n')]
|
"""
Tests for structural time series models
Author: <NAME>
License: Simplified-BSD
"""
import warnings
import numpy as np
from numpy.testing import assert_equal, assert_allclose, assert_raises
import pandas as pd
import pytest
from statsmodels.datasets import macrodata
from statsmodels.tools.sm_exceptions import SpecificationWarning
from statsmodels.tsa.statespace import structural
from statsmodels.tsa.statespace.structural import UnobservedComponents
from statsmodels.tsa.statespace.tests.results import results_structural
dta = macrodata.load_pandas().data
dta.index = pd.date_range(start='1959-01-01', end='2009-07-01', freq='QS')
def run_ucm(name):
true = getattr(results_structural, name)
for model in true['models']:
kwargs = model.copy()
kwargs.update(true['kwargs'])
# Make a copy of the data
values = dta.copy()
freq = kwargs.pop('freq', None)
if freq is not None:
values.index = pd.date_range(start='1959-01-01', periods=len(dta),
freq=freq)
# Test pandas exog
if 'exog' in kwargs:
# Default value here is pd.Series object
exog = np.log(values['realgdp'])
# Also allow a check with a 1-dim numpy array
if kwargs['exog'] == 'numpy':
exog = exog.values.squeeze()
kwargs['exog'] = exog
# Create the model
mod = UnobservedComponents(values['unemp'], **kwargs)
# Smoke test for starting parameters, untransform, transform
# Also test that transform and untransform are inverses
mod.start_params
roundtrip = mod.transform_params(
mod.untransform_params(mod.start_params))
assert_allclose(mod.start_params, roundtrip)
# Fit the model at the true parameters
res_true = mod.filter(true['params'])
# Check that the cycle bounds were computed correctly
freqstr = freq[0] if freq is not None else values.index.freqstr[0]
if 'cycle_period_bounds' in kwargs:
cycle_period_bounds = kwargs['cycle_period_bounds']
elif freqstr == 'A':
cycle_period_bounds = (1.5, 12)
elif freqstr == 'Q':
cycle_period_bounds = (1.5*4, 12*4)
elif freqstr == 'M':
cycle_period_bounds = (1.5*12, 12*12)
else:
# If we have no information on data frequency, require the
# cycle frequency to be between 0 and pi
cycle_period_bounds = (2, np.inf)
# Test that the cycle frequency bound is correct
assert_equal(mod.cycle_frequency_bound,
(2*np.pi / cycle_period_bounds[1],
2*np.pi / cycle_period_bounds[0]))
# Test that the likelihood is correct
rtol = true.get('rtol', 1e-7)
atol = true.get('atol', 0)
assert_allclose(res_true.llf, true['llf'], rtol=rtol, atol=atol)
# Optional smoke test for plot_components
try:
import matplotlib.pyplot as plt
try:
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
except ImportError:
pass
fig = plt.figure()
res_true.plot_components(fig=fig)
except ImportError:
pass
# Now fit the model via MLE
with warnings.catch_warnings(record=True):
res = mod.fit(disp=-1)
# If we found a higher likelihood, no problem; otherwise check
# that we're very close to that found by R
if res.llf <= true['llf']:
assert_allclose(res.llf, true['llf'], rtol=1e-4)
# Smoke test for summary
res.summary()
def test_irregular(close_figures):
run_ucm('irregular')
def test_fixed_intercept(close_figures):
# Clear warnings
structural.__warningregistry__ = {}
warning = SpecificationWarning
match = 'Specified model does not contain'
with pytest.warns(warning, match=match):
run_ucm('fixed_intercept')
def test_deterministic_constant(close_figures):
run_ucm('deterministic_constant')
def test_random_walk(close_figures):
run_ucm('random_walk')
def test_local_level(close_figures):
run_ucm('local_level')
def test_fixed_slope(close_figures):
warning = SpecificationWarning
match = 'irregular component added'
with pytest.warns(warning, match=match):
run_ucm('fixed_slope')
def test_fixed_slope_warn(close_figures):
# Clear warnings
structural.__warningregistry__ = {}
warning = SpecificationWarning
match = 'irregular component added'
with pytest.warns(warning, match=match):
run_ucm('fixed_slope')
def test_deterministic_trend(close_figures):
run_ucm('deterministic_trend')
def test_random_walk_with_drift(close_figures):
run_ucm('random_walk_with_drift')
def test_local_linear_deterministic_trend(close_figures):
run_ucm('local_linear_deterministic_trend')
def test_local_linear_trend(close_figures):
run_ucm('local_linear_trend')
def test_smooth_trend(close_figures):
run_ucm('smooth_trend')
def test_random_trend(close_figures):
run_ucm('random_trend')
def test_cycle(close_figures):
run_ucm('cycle')
def test_seasonal(close_figures):
run_ucm('seasonal')
def test_freq_seasonal(close_figures):
run_ucm('freq_seasonal')
def test_reg(close_figures):
run_ucm('reg')
def test_rtrend_ar1(close_figures):
run_ucm('rtrend_ar1')
@pytest.mark.slow
def test_lltrend_cycle_seasonal_reg_ar1(close_figures):
run_ucm('lltrend_cycle_seasonal_reg_ar1')
def test_mle_reg():
endog = np.arange(100)*1.0
exog = endog*2
# Make the fit not-quite-perfect
endog[::2] += 0.01
endog[1::2] -= 0.01
with warnings.catch_warnings(record=True):
mod1 = UnobservedComponents(endog, irregular=True,
exog=exog, mle_regression=False)
res1 = mod1.fit(disp=-1)
mod2 = UnobservedComponents(endog, irregular=True,
exog=exog, mle_regression=True)
res2 = mod2.fit(disp=-1)
assert_allclose(res1.regression_coefficients.filtered[0, -1],
0.5,
atol=1e-5)
assert_allclose(res2.params[1], 0.5, atol=1e-5)
def test_specifications():
# Clear warnings
structural.__warningregistry__ = {}
endog = [1, 2]
# Test that when nothing specified, a warning is issued and the model that
# is fit is one with irregular=True and nothing else.
warning = SpecificationWarning
match = 'irregular component added'
with pytest.warns(warning, match=match):
mod = UnobservedComponents(endog)
assert_equal(mod.trend_specification, 'irregular')
# Test an invalid string trend specification
with pytest.raises(ValueError):
UnobservedComponents(endog, 'invalid spec')
# Test that if a trend component is specified without a level component,
# a warning is issued and a deterministic level component is added
warning = SpecificationWarning
match = 'Trend component specified without'
with pytest.warns(warning, match=match):
mod = UnobservedComponents(endog, trend=True, irregular=True)
assert_equal(mod.trend_specification, 'deterministic trend')
# Test that if a string specification is provided, a warning is issued if
# the boolean attributes are also specified
trend_attributes = ['irregular', 'trend', 'stochastic_level',
'stochastic_trend']
for attribute in trend_attributes:
kwargs = {attribute: True}
warning = SpecificationWarning
match = 'may be overridden when the trend'
with pytest.warns(warning, match=match):
UnobservedComponents(endog, 'deterministic trend', **kwargs)
# Test that a seasonal with period less than two is invalid
with pytest.raises(ValueError):
UnobservedComponents(endog, seasonal=1)
def test_start_params():
# Test that the behavior is correct for multiple exogenous and / or
# autoregressive components
# Parameters
nobs = int(1e4)
beta = np.r_[10, -2]
phi = np.r_[0.5, 0.1]
# Generate data
np.random.seed(1234)
exog = np.c_[np.ones(nobs), np.arange(nobs)*1.0]
eps = np.random.normal(size=nobs)
endog = np.zeros(nobs+2)
for t in range(1, nobs):
endog[t+1] = phi[0] * endog[t] + phi[1] * endog[t-1] + eps[t]
endog = endog[2:]
endog += np.dot(exog, beta)
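    # endog is an AR(2) process (phi = [0.5, 0.1], unit-variance noise) plus the
    # regression term exog.dot(beta); start_params should roughly recover these values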
# Now just test that the starting parameters are approximately what they
# ought to be (could make this arbitrarily precise by increasing nobs,
# but that would slow down the test for no real gain)
mod = UnobservedComponents(endog, exog=exog, autoregressive=2)
assert_allclose(mod.start_params, [1., 0.5, 0.1, 10, -2], atol=1e-1)
def test_forecast():
endog = np.arange(50) + 10
exog = np.arange(50)
mod = UnobservedComponents(endog, exog=exog, level='dconstant', seasonal=4)
res = mod.smooth([1e-15, 0, 1])
actual = res.forecast(10, exog=np.arange(50, 60)[:, np.newaxis])
desired = np.arange(50, 60) + 10
assert_allclose(actual, desired)
def test_misc_exog():
# Tests for missing data
nobs = 20
k_endog = 1
np.random.seed(1208)
endog = np.random.normal(size=(nobs, k_endog))
endog[:4, 0] = np.nan
exog1 = np.random.normal(size=(nobs, 1))
exog2 = np.random.normal(size=(nobs, 2))
index = pd.date_range('1970-01-01', freq='QS', periods=nobs)
endog_pd = pd.DataFrame(endog, index=index)
exog1_pd = pd.Series(exog1.squeeze(), index=index)
exog2_pd = pd.DataFrame(exog2, index=index)
models = [
UnobservedComponents(endog, 'llevel', exog=exog1),
UnobservedComponents(endog, 'llevel', exog=exog2),
UnobservedComponents(endog, 'llevel', exog=exog2),
UnobservedComponents(endog_pd, 'llevel', exog=exog1_pd),
UnobservedComponents(endog_pd, 'llevel', exog=exog2_pd),
UnobservedComponents(endog_pd, 'llevel', exog=exog2_pd),
]
for mod in models:
# Smoke tests
mod.start_params
res = mod.fit(disp=False)
res.summary()
res.predict()
res.predict(dynamic=True)
res.get_prediction()
oos_exog = np.random.normal(size=(1, mod.k_exog))
res.forecast(steps=1, exog=oos_exog)
res.get_forecast(steps=1, exog=oos_exog)
# Smoke tests for invalid exog
oos_exog = np.random.normal(size=(2, mod.k_exog))
with pytest.raises(ValueError):
res.forecast(steps=1, exog=oos_exog)
oos_exog = np.random.normal(size=(1, mod.k_exog + 1))
with pytest.raises(ValueError):
res.forecast(steps=1, exog=oos_exog)
# Test invalid model specifications
with pytest.raises(ValueError):
UnobservedComponents(endog, 'llevel', exog=np.zeros((10, 4)))
def test_predict_custom_index():
np.random.seed(328423)
endog = pd.DataFrame(np.random.normal(size=50))
mod = structural.UnobservedComponents(endog, 'llevel')
res = mod.smooth(mod.start_params)
out = res.predict(start=1, end=1, index=['a'])
assert_equal(out.index.equals(pd.Index(['a'])), True)
def test_matrices_somewhat_complicated_model():
values = dta.copy()
model = UnobservedComponents(values['unemp'],
level='lltrend',
freq_seasonal=[{'period': 4},
{'period': 9, 'harmonics': 3}],
cycle=True,
cycle_period_bounds=[2, 30],
damped_cycle=True,
stochastic_freq_seasonal=[True, False],
stochastic_cycle=True
)
# Selected parameters
params = [1, # irregular_var
3, 4, # lltrend parameters: level_var, trend_var
5, # freq_seasonal parameters: freq_seasonal_var_0
# cycle parameters: cycle_var, cycle_freq, cycle_damp
6, 2*np.pi/30., .9
]
model.update(params)
# Check scalar properties
assert_equal(model.k_states, 2 + 4 + 6 + 2)
assert_equal(model.k_state_cov, 2 + 1 + 0 + 1)
assert_equal(model.loglikelihood_burn, 2 + 4 + 6 + 2)
assert_allclose(model.ssm.k_posdef, 2 + 4 + 0 + 2)
assert_equal(model.k_params, len(params))
# Check the statespace model matrices against hand-constructed answers
# We group the terms by the component
expected_design = np.r_[[1, 0],
[1, 0, 1, 0],
[1, 0, 1, 0, 1, 0],
[1, 0]].reshape(1, 14)
assert_allclose(model.ssm.design[:, :, 0], expected_design)
expected_transition = __direct_sum([
np.array([[1, 1],
[0, 1]]),
np.array([[0, 1, 0, 0],
[-1, 0, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, -1]]),
np.array([[np.cos(2*np.pi*1/9.), np.sin(2*np.pi*1/9.), 0, 0, 0, 0],
[-np.sin(2*np.pi*1/9.), np.cos(2*np.pi*1/9.), 0, 0, 0, 0],
[0, 0, np.cos(2*np.pi*2/9.), np.sin(2*np.pi*2/9.), 0, 0],
[0, 0, -np.sin(2*np.pi*2/9.), np.cos(2*np.pi*2/9.), 0, 0],
[0, 0, 0, 0, np.cos(2*np.pi/3.), np.sin(2*np.pi/3.)],
[0, 0, 0, 0, -np.sin(2*np.pi/3.), np.cos(2*np.pi/3.)]]),
np.array([[.9*np.cos(2*np.pi/30.), .9*np.sin(2*np.pi/30.)],
[-.9*np.sin(2*np.pi/30.), .9*np.cos(2*np.pi/30.)]])
])
assert_allclose(
model.ssm.transition[:, :, 0], expected_transition, atol=1e-7)
# Since the second seasonal term is not stochastic,
# the dimensionality of the state disturbance is 14 - 6 = 8
expected_selection = np.zeros((14, 14 - 6))
expected_selection[0:2, 0:2] = np.eye(2)
expected_selection[2:6, 2:6] = np.eye(4)
expected_selection[-2:, -2:] = np.eye(2)
assert_allclose(model.ssm.selection[:, :, 0], expected_selection)
expected_state_cov = __direct_sum([
np.diag(params[1:3]),
np.eye(4)*params[3],
np.eye(2)*params[4]
])
assert_allclose(model.ssm.state_cov[:, :, 0], expected_state_cov)
def __direct_sum(square_matrices):
"""Compute the matrix direct sum of an iterable of square numpy 2-d arrays
"""
new_shape = np.sum([m.shape for m in square_matrices], axis=0)
new_array = np.zeros(new_shape)
offset = 0
for m in square_matrices:
rows, cols = m.shape
assert rows == cols
new_array[offset:offset + rows, offset:offset + rows] = m
offset += rows
return new_array
def test_forecast_exog():
# Test forecasting with various shapes of `exog`
nobs = 100
endog = np.ones(nobs) * 2.0
exog = np.ones(nobs)
mod = UnobservedComponents(endog, 'irregular', exog=exog)
res = mod.smooth([1.0, 2.0])
# 1-step-ahead, valid
exog_fcast_scalar = 1.
exog_fcast_1dim = np.ones(1)
exog_fcast_2dim = np.ones((1, 1))
assert_allclose(res.forecast(1, exog=exog_fcast_scalar), 2.)
assert_allclose(res.forecast(1, exog=exog_fcast_1dim), 2.)
assert_allclose(res.forecast(1, exog=exog_fcast_2dim), 2.)
# h-steps-ahead, valid
h = 10
exog_fcast_1dim = np.ones(h)
exog_fcast_2dim = np.ones((h, 1))
assert_allclose(res.forecast(h, exog=exog_fcast_1dim), 2.)
assert_allclose(res.forecast(h, exog=exog_fcast_2dim), 2.)
# h-steps-ahead, invalid
assert_raises(ValueError, res.forecast, h, exog=1.)
assert_raises(ValueError, res.forecast, h, exog=[1, 2])
assert_raises(ValueError, res.forecast, h, exog=np.ones((h, 2)))
def check_equivalent_models(mod, mod2):
attrs = [
'level', 'trend', 'seasonal_periods', 'seasonal',
'freq_seasonal_periods', 'freq_seasonal_harmonics', 'freq_seasonal',
'cycle', 'ar_order', 'autoregressive', 'irregular', 'stochastic_level',
'stochastic_trend', 'stochastic_seasonal', 'stochastic_freq_seasonal',
'stochastic_cycle', 'damped_cycle', 'mle_regression',
'trend_specification', 'trend_mask', 'regression',
'cycle_frequency_bound']
ssm_attrs = [
'nobs', 'k_endog', 'k_states', 'k_posdef', 'obs_intercept', 'design',
'obs_cov', 'state_intercept', 'transition', 'selection', 'state_cov']
for attr in attrs:
assert_equal(getattr(mod2, attr), getattr(mod, attr))
for attr in ssm_attrs:
assert_equal(getattr(mod2.ssm, attr), getattr(mod.ssm, attr))
assert_equal(mod2._get_init_kwds(), mod._get_init_kwds())
def test_recreate_model():
nobs = 100
endog = np.ones(nobs) * 2.0
exog = np.ones(nobs)
levels = [
'irregular', 'ntrend', 'fixed intercept', 'deterministic constant',
'dconstant', 'local level', 'llevel', 'random walk', 'rwalk',
'fixed slope', 'deterministic trend', 'dtrend',
'local linear deterministic trend', 'lldtrend',
'random walk with drift', 'rwdrift', 'local linear trend',
'lltrend', 'smooth trend', 'strend', 'random trend', 'rtrend']
for level in levels:
# Note: have to add in some stochastic component, otherwise we have
# problems with entirely deterministic models
# level + stochastic seasonal
mod = UnobservedComponents(endog, level=level, seasonal=2,
stochastic_seasonal=True, exog=exog)
mod2 = UnobservedComponents(endog, exog=exog, **mod._get_init_kwds())
check_equivalent_models(mod, mod2)
# level + autoregressive
mod = UnobservedComponents(endog, level=level, exog=exog,
autoregressive=1)
mod2 = UnobservedComponents(endog, exog=exog, **mod._get_init_kwds())
check_equivalent_models(mod, mod2)
# level + stochastic cycle
mod = UnobservedComponents(endog, level=level, exog=exog,
cycle=True, stochastic_cycle=True,
damped_cycle=True)
mod2 = UnobservedComponents(endog, exog=exog, **mod._get_init_kwds())
check_equivalent_models(mod, mod2)
def test_append_results():
endog = np.arange(100)
exog = np.ones_like(endog)
params = [1., 1., 0.1, 1.]
mod1 = UnobservedComponents(endog, 'llevel', exog=exog)
res1 = mod1.smooth(params)
mod2 = UnobservedComponents(endog[:50], 'llevel', exog=exog[:50])
res2 = mod2.smooth(params)
res3 = res2.append(endog[50:], exog=exog[50:])
assert_equal(res1.specification, res3.specification)
assert_allclose(res3.cov_params_default, res2.cov_params_default)
for attr in ['nobs', 'llf', 'llf_obs', 'loglikelihood_burn']:
assert_equal(getattr(res3, attr), getattr(res1, attr))
for attr in [
'filtered_state', 'filtered_state_cov', 'predicted_state',
'predicted_state_cov', 'forecasts', 'forecasts_error',
'forecasts_error_cov', 'standardized_forecasts_error',
'forecasts_error_diffuse_cov', 'predicted_diffuse_state_cov',
'scaled_smoothed_estimator',
'scaled_smoothed_estimator_cov', 'smoothing_error',
'smoothed_state',
'smoothed_state_cov', 'smoothed_state_autocov',
'smoothed_measurement_disturbance',
'smoothed_state_disturbance',
'smoothed_measurement_disturbance_cov',
'smoothed_state_disturbance_cov']:
assert_equal(getattr(res3, attr), getattr(res1, attr))
assert_allclose(res3.forecast(10, exog=np.ones(10)),
res1.forecast(10, exog=np.ones(10)))
def test_extend_results():
endog = np.arange(100)
exog = np.ones_like(endog)
params = [1., 1., 0.1, 1.]
mod1 = UnobservedComponents(endog, 'llevel', exog=exog)
res1 = mod1.smooth(params)
mod2 = UnobservedComponents(endog[:50], 'llevel', exog=exog[:50])
res2 = mod2.smooth(params)
res3 = res2.extend(endog[50:], exog=exog[50:])
assert_allclose(res3.llf_obs, res1.llf_obs[50:])
for attr in [
'filtered_state', 'filtered_state_cov', 'predicted_state',
'predicted_state_cov', 'forecasts', 'forecasts_error',
'forecasts_error_cov', 'standardized_forecasts_error',
'forecasts_error_diffuse_cov', 'predicted_diffuse_state_cov',
'scaled_smoothed_estimator',
'scaled_smoothed_estimator_cov', 'smoothing_error',
'smoothed_state',
'smoothed_state_cov', 'smoothed_state_autocov',
'smoothed_measurement_disturbance',
'smoothed_state_disturbance',
'smoothed_measurement_disturbance_cov',
'smoothed_state_disturbance_cov']:
desired = getattr(res1, attr)
if desired is not None:
desired = desired[..., 50:]
assert_equal(getattr(res3, attr), desired)
assert_allclose(res3.forecast(10, exog=np.ones(10)),
res1.forecast(10, exog=np.ones(10)))
def test_apply_results():
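    """Applying a results object to a new dataset (here the first half) should
    match a model smoothed directly on that dataset."""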
endog = np.arange(100)
exog = np.ones_like(endog)
params = [1., 1., 0.1, 1.]
mod1 = UnobservedComponents(endog[:50], 'llevel', exog=exog[:50])
res1 = mod1.smooth(params)
mod2 = UnobservedComponents(endog[50:], 'llevel', exog=exog[50:])
res2 = mod2.smooth(params)
res3 = res2.apply(endog[:50], exog=exog[:50])
assert_equal(res1.specification, res3.specification)
assert_allclose(res3.cov_params_default, res2.cov_params_default)
for attr in ['nobs', 'llf', 'llf_obs', 'loglikelihood_burn']:
assert_equal(getattr(res3, attr), getattr(res1, attr))
for attr in [
'filtered_state', 'filtered_state_cov', 'predicted_state',
'predicted_state_cov', 'forecasts', 'forecasts_error',
'forecasts_error_cov', 'standardized_forecasts_error',
'forecasts_error_diffuse_cov', 'predicted_diffuse_state_cov',
'scaled_smoothed_estimator',
'scaled_smoothed_estimator_cov', 'smoothing_error',
'smoothed_state',
'smoothed_state_cov', 'smoothed_state_autocov',
'smoothed_measurement_disturbance',
'smoothed_state_disturbance',
'smoothed_measurement_disturbance_cov',
'smoothed_state_disturbance_cov']:
assert_equal(getattr(res3, attr), getattr(res1, attr))
assert_allclose(res3.forecast(10, exog=np.ones(10)),
res1.forecast(10, exog=np.ones(10)))
|
[
"numpy.random.seed",
"numpy.sum",
"numpy.ones",
"matplotlib.pyplot.figure",
"numpy.sin",
"numpy.arange",
"numpy.random.normal",
"statsmodels.datasets.macrodata.load_pandas",
"numpy.diag",
"pandas.DataFrame",
"pytest.warns",
"pytest.raises",
"warnings.catch_warnings",
"numpy.testing.assert_equal",
"numpy.testing.assert_allclose",
"pandas.date_range",
"numpy.ones_like",
"numpy.testing.assert_raises",
"pandas.Index",
"numpy.cos",
"numpy.dot",
"statsmodels.tsa.statespace.structural.UnobservedComponents",
"numpy.log",
"numpy.zeros",
"pandas.plotting.register_matplotlib_converters",
"numpy.array",
"numpy.eye"
] |
[((580, 642), 'pandas.date_range', 'pd.date_range', ([], {'start': '"""1959-01-01"""', 'end': '"""2009-07-01"""', 'freq': '"""QS"""'}), "(start='1959-01-01', end='2009-07-01', freq='QS')\n", (593, 642), True, 'import pandas as pd\n'), ((539, 562), 'statsmodels.datasets.macrodata.load_pandas', 'macrodata.load_pandas', ([], {}), '()\n', (560, 562), False, 'from statsmodels.datasets import macrodata\n'), ((6259, 6337), 'numpy.testing.assert_allclose', 'assert_allclose', (['res1.regression_coefficients.filtered[0, -1]', '(0.5)'], {'atol': '(1e-05)'}), '(res1.regression_coefficients.filtered[0, -1], 0.5, atol=1e-05)\n', (6274, 6337), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((6381, 6429), 'numpy.testing.assert_allclose', 'assert_allclose', (['res2.params[1]', '(0.5)'], {'atol': '(1e-05)'}), '(res2.params[1], 0.5, atol=1e-05)\n', (6396, 6429), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((8370, 8390), 'numpy.random.seed', 'np.random.seed', (['(1234)'], {}), '(1234)\n', (8384, 8390), True, 'import numpy as np\n'), ((8454, 8481), 'numpy.random.normal', 'np.random.normal', ([], {'size': 'nobs'}), '(size=nobs)\n', (8470, 8481), True, 'import numpy as np\n'), ((8494, 8512), 'numpy.zeros', 'np.zeros', (['(nobs + 2)'], {}), '(nobs + 2)\n', (8502, 8512), True, 'import numpy as np\n'), ((8645, 8663), 'numpy.dot', 'np.dot', (['exog', 'beta'], {}), '(exog, beta)\n', (8651, 8663), True, 'import numpy as np\n'), ((8885, 8941), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'exog': 'exog', 'autoregressive': '(2)'}), '(endog, exog=exog, autoregressive=2)\n', (8905, 8941), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((8946, 9014), 'numpy.testing.assert_allclose', 'assert_allclose', (['mod.start_params', '[1.0, 0.5, 0.1, 10, -2]'], {'atol': '(0.1)'}), '(mod.start_params, [1.0, 0.5, 0.1, 10, -2], atol=0.1)\n', (8961, 9014), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((9080, 9093), 'numpy.arange', 'np.arange', (['(50)'], {}), '(50)\n', (9089, 9093), True, 'import numpy as np\n'), ((9105, 9174), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'exog': 'exog', 'level': '"""dconstant"""', 'seasonal': '(4)'}), "(endog, exog=exog, level='dconstant', seasonal=4)\n", (9125, 9174), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((9322, 9354), 'numpy.testing.assert_allclose', 'assert_allclose', (['actual', 'desired'], {}), '(actual, desired)\n', (9337, 9354), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((9442, 9462), 'numpy.random.seed', 'np.random.seed', (['(1208)'], {}), '(1208)\n', (9456, 9462), True, 'import numpy as np\n'), ((9475, 9513), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(nobs, k_endog)'}), '(size=(nobs, k_endog))\n', (9491, 9513), True, 'import numpy as np\n'), ((9552, 9584), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(nobs, 1)'}), '(size=(nobs, 1))\n', (9568, 9584), True, 'import numpy as np\n'), ((9597, 9629), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(nobs, 2)'}), '(size=(nobs, 2))\n', (9613, 9629), True, 'import numpy as np\n'), ((9643, 9695), 'pandas.date_range', 'pd.date_range', (['"""1970-01-01"""'], {'freq': '"""QS"""', 'periods': 'nobs'}), "('1970-01-01', freq='QS', periods=nobs)\n", (9656, 9695), True, 'import 
pandas as pd\n'), ((9711, 9743), 'pandas.DataFrame', 'pd.DataFrame', (['endog'], {'index': 'index'}), '(endog, index=index)\n', (9723, 9743), True, 'import pandas as pd\n'), ((9814, 9846), 'pandas.DataFrame', 'pd.DataFrame', (['exog2'], {'index': 'index'}), '(exog2, index=index)\n', (9826, 9846), True, 'import pandas as pd\n'), ((11131, 11153), 'numpy.random.seed', 'np.random.seed', (['(328423)'], {}), '(328423)\n', (11145, 11153), True, 'import numpy as np\n'), ((11216, 11264), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'structural.UnobservedComponents', (['endog', '"""llevel"""'], {}), "(endog, 'llevel')\n", (11247, 11264), False, 'from statsmodels.tsa.statespace import structural\n'), ((11500, 11752), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (["values['unemp']"], {'level': '"""lltrend"""', 'freq_seasonal': "[{'period': 4}, {'period': 9, 'harmonics': 3}]", 'cycle': '(True)', 'cycle_period_bounds': '[2, 30]', 'damped_cycle': '(True)', 'stochastic_freq_seasonal': '[True, False]', 'stochastic_cycle': '(True)'}), "(values['unemp'], level='lltrend', freq_seasonal=[{\n 'period': 4}, {'period': 9, 'harmonics': 3}], cycle=True,\n cycle_period_bounds=[2, 30], damped_cycle=True,\n stochastic_freq_seasonal=[True, False], stochastic_cycle=True)\n", (11520, 11752), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((12422, 12465), 'numpy.testing.assert_equal', 'assert_equal', (['model.k_states', '(2 + 4 + 6 + 2)'], {}), '(model.k_states, 2 + 4 + 6 + 2)\n', (12434, 12465), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((12470, 12516), 'numpy.testing.assert_equal', 'assert_equal', (['model.k_state_cov', '(2 + 1 + 0 + 1)'], {}), '(model.k_state_cov, 2 + 1 + 0 + 1)\n', (12482, 12516), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((12521, 12574), 'numpy.testing.assert_equal', 'assert_equal', (['model.loglikelihood_burn', '(2 + 4 + 6 + 2)'], {}), '(model.loglikelihood_burn, 2 + 4 + 6 + 2)\n', (12533, 12574), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((12579, 12629), 'numpy.testing.assert_allclose', 'assert_allclose', (['model.ssm.k_posdef', '(2 + 4 + 0 + 2)'], {}), '(model.ssm.k_posdef, 2 + 4 + 0 + 2)\n', (12594, 12629), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((12975, 13034), 'numpy.testing.assert_allclose', 'assert_allclose', (['model.ssm.design[:, :, 0]', 'expected_design'], {}), '(model.ssm.design[:, :, 0], expected_design)\n', (12990, 13034), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((13869, 13948), 'numpy.testing.assert_allclose', 'assert_allclose', (['model.ssm.transition[:, :, 0]', 'expected_transition'], {'atol': '(1e-07)'}), '(model.ssm.transition[:, :, 0], expected_transition, atol=1e-07)\n', (13884, 13948), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((14103, 14125), 'numpy.zeros', 'np.zeros', (['(14, 14 - 6)'], {}), '((14, 14 - 6))\n', (14111, 14125), True, 'import numpy as np\n'), ((14161, 14170), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (14167, 14170), True, 'import numpy as np\n'), ((14206, 14215), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (14212, 14215), True, 'import numpy as np\n'), ((14251, 14260), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (14257, 14260), True, 'import numpy as np\n'), ((14265, 14330), 
'numpy.testing.assert_allclose', 'assert_allclose', (['model.ssm.selection[:, :, 0]', 'expected_selection'], {}), '(model.ssm.selection[:, :, 0], expected_selection)\n', (14280, 14330), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((14470, 14535), 'numpy.testing.assert_allclose', 'assert_allclose', (['model.ssm.state_cov[:, :, 0]', 'expected_state_cov'], {}), '(model.ssm.state_cov[:, :, 0], expected_state_cov)\n', (14485, 14535), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((14676, 14726), 'numpy.sum', 'np.sum', (['[m.shape for m in square_matrices]'], {'axis': '(0)'}), '([m.shape for m in square_matrices], axis=0)\n', (14682, 14726), True, 'import numpy as np\n'), ((14743, 14762), 'numpy.zeros', 'np.zeros', (['new_shape'], {}), '(new_shape)\n', (14751, 14762), True, 'import numpy as np\n'), ((15114, 15127), 'numpy.ones', 'np.ones', (['nobs'], {}), '(nobs)\n', (15121, 15127), True, 'import numpy as np\n'), ((15139, 15190), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""irregular"""'], {'exog': 'exog'}), "(endog, 'irregular', exog=exog)\n", (15159, 15190), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((15300, 15310), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (15307, 15310), True, 'import numpy as np\n'), ((15333, 15348), 'numpy.ones', 'np.ones', (['(1, 1)'], {}), '((1, 1))\n', (15340, 15348), True, 'import numpy as np\n'), ((15602, 15612), 'numpy.ones', 'np.ones', (['h'], {}), '(h)\n', (15609, 15612), True, 'import numpy as np\n'), ((15635, 15650), 'numpy.ones', 'np.ones', (['(h, 1)'], {}), '((h, 1))\n', (15642, 15650), True, 'import numpy as np\n'), ((15812, 15864), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'res.forecast', 'h'], {'exog': '(1.0)'}), '(ValueError, res.forecast, h, exog=1.0)\n', (15825, 15864), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((15868, 15923), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'res.forecast', 'h'], {'exog': '[1, 2]'}), '(ValueError, res.forecast, h, exog=[1, 2])\n', (15881, 15923), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((17006, 17019), 'numpy.ones', 'np.ones', (['nobs'], {}), '(nobs)\n', (17013, 17019), True, 'import numpy as np\n'), ((18549, 18563), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (18558, 18563), True, 'import numpy as np\n'), ((18575, 18594), 'numpy.ones_like', 'np.ones_like', (['endog'], {}), '(endog)\n', (18587, 18594), True, 'import numpy as np\n'), ((18638, 18686), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""llevel"""'], {'exog': 'exog'}), "(endog, 'llevel', exog=exog)\n", (18658, 18686), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((18730, 18788), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog[:50]', '"""llevel"""'], {'exog': 'exog[:50]'}), "(endog[:50], 'llevel', exog=exog[:50])\n", (18750, 18788), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((18876, 18928), 'numpy.testing.assert_equal', 'assert_equal', (['res1.specification', 'res3.specification'], {}), '(res1.specification, res3.specification)\n', (18888, 18928), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((18934, 18999), 
'numpy.testing.assert_allclose', 'assert_allclose', (['res3.cov_params_default', 'res2.cov_params_default'], {}), '(res3.cov_params_default, res2.cov_params_default)\n', (18949, 18999), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((20030, 20044), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (20039, 20044), True, 'import numpy as np\n'), ((20056, 20075), 'numpy.ones_like', 'np.ones_like', (['endog'], {}), '(endog)\n', (20068, 20075), True, 'import numpy as np\n'), ((20119, 20167), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""llevel"""'], {'exog': 'exog'}), "(endog, 'llevel', exog=exog)\n", (20139, 20167), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((20211, 20269), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog[:50]', '"""llevel"""'], {'exog': 'exog[:50]'}), "(endog[:50], 'llevel', exog=exog[:50])\n", (20231, 20269), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((20358, 20406), 'numpy.testing.assert_allclose', 'assert_allclose', (['res3.llf_obs', 'res1.llf_obs[50:]'], {}), '(res3.llf_obs, res1.llf_obs[50:])\n', (20373, 20406), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((21405, 21419), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (21414, 21419), True, 'import numpy as np\n'), ((21431, 21450), 'numpy.ones_like', 'np.ones_like', (['endog'], {}), '(endog)\n', (21443, 21450), True, 'import numpy as np\n'), ((21494, 21552), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog[:50]', '"""llevel"""'], {'exog': 'exog[:50]'}), "(endog[:50], 'llevel', exog=exog[:50])\n", (21514, 21552), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((21596, 21654), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog[50:]', '"""llevel"""'], {'exog': 'exog[50:]'}), "(endog[50:], 'llevel', exog=exog[50:])\n", (21616, 21654), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((21742, 21794), 'numpy.testing.assert_equal', 'assert_equal', (['res1.specification', 'res3.specification'], {}), '(res1.specification, res3.specification)\n', (21754, 21794), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((21800, 21865), 'numpy.testing.assert_allclose', 'assert_allclose', (['res3.cov_params_default', 'res2.cov_params_default'], {}), '(res3.cov_params_default, res2.cov_params_default)\n', (21815, 21865), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((1453, 1500), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (["values['unemp']"], {}), "(values['unemp'], **kwargs)\n", (1473, 1500), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((1764, 1808), 'numpy.testing.assert_allclose', 'assert_allclose', (['mod.start_params', 'roundtrip'], {}), '(mod.start_params, roundtrip)\n', (1779, 1808), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((2628, 2745), 'numpy.testing.assert_equal', 'assert_equal', (['mod.cycle_frequency_bound', '(2 * np.pi / cycle_period_bounds[1], 2 * np.pi / cycle_period_bounds[0])'], {}), '(mod.cycle_frequency_bound, (2 * np.pi / cycle_period_bounds[1],\n 2 * np.pi / cycle_period_bounds[0]))\n', 
(2640, 2745), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((2909, 2973), 'numpy.testing.assert_allclose', 'assert_allclose', (['res_true.llf', "true['llf']"], {'rtol': 'rtol', 'atol': 'atol'}), "(res_true.llf, true['llf'], rtol=rtol, atol=atol)\n", (2924, 2973), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((4076, 4110), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (4088, 4110), False, 'import pytest\n'), ((4490, 4524), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (4502, 4524), False, 'import pytest\n'), ((4747, 4781), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (4759, 4781), False, 'import pytest\n'), ((5762, 5776), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (5771, 5776), True, 'import numpy as np\n'), ((5894, 5930), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (5917, 5930), False, 'import warnings\n'), ((5947, 6023), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'irregular': '(True)', 'exog': 'exog', 'mle_regression': '(False)'}), '(endog, irregular=True, exog=exog, mle_regression=False)\n', (5967, 6023), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((6109, 6184), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'irregular': '(True)', 'exog': 'exog', 'mle_regression': '(True)'}), '(endog, irregular=True, exog=exog, mle_regression=True)\n', (6129, 6184), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((6761, 6795), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (6773, 6795), False, 'import pytest\n'), ((6811, 6838), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {}), '(endog)\n', (6831, 6838), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((6847, 6897), 'numpy.testing.assert_equal', 'assert_equal', (['mod.trend_specification', '"""irregular"""'], {}), "(mod.trend_specification, 'irregular')\n", (6859, 6897), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((6957, 6982), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6970, 6982), False, 'import pytest\n'), ((6992, 7035), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""invalid spec"""'], {}), "(endog, 'invalid spec')\n", (7012, 7035), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((7277, 7311), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (7289, 7311), False, 'import pytest\n'), ((7327, 7382), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'trend': '(True)', 'irregular': '(True)'}), '(endog, trend=True, irregular=True)\n', (7347, 7382), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((7391, 7451), 'numpy.testing.assert_equal', 'assert_equal', (['mod.trend_specification', '"""deterministic trend"""'], {}), "(mod.trend_specification, 'deterministic trend')\n", (7403, 7451), False, 'from numpy.testing import assert_equal, assert_allclose, 
assert_raises\n'), ((8050, 8075), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8063, 8075), False, 'import pytest\n'), ((8085, 8124), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'seasonal': '(1)'}), '(endog, seasonal=1)\n', (8105, 8124), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((9050, 9063), 'numpy.arange', 'np.arange', (['(50)'], {}), '(50)\n', (9059, 9063), True, 'import numpy as np\n'), ((9295, 9312), 'numpy.arange', 'np.arange', (['(50)', '(60)'], {}), '(50, 60)\n', (9304, 9312), True, 'import numpy as np\n'), ((9871, 9920), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""llevel"""'], {'exog': 'exog1'}), "(endog, 'llevel', exog=exog1)\n", (9891, 9920), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((9930, 9979), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""llevel"""'], {'exog': 'exog2'}), "(endog, 'llevel', exog=exog2)\n", (9950, 9979), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((9989, 10038), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""llevel"""'], {'exog': 'exog2'}), "(endog, 'llevel', exog=exog2)\n", (10009, 10038), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((10048, 10103), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog_pd', '"""llevel"""'], {'exog': 'exog1_pd'}), "(endog_pd, 'llevel', exog=exog1_pd)\n", (10068, 10103), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((10113, 10168), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog_pd', '"""llevel"""'], {'exog': 'exog2_pd'}), "(endog_pd, 'llevel', exog=exog2_pd)\n", (10133, 10168), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((10178, 10233), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog_pd', '"""llevel"""'], {'exog': 'exog2_pd'}), "(endog_pd, 'llevel', exog=exog2_pd)\n", (10198, 10233), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((10473, 10511), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(1, mod.k_exog)'}), '(size=(1, mod.k_exog))\n', (10489, 10511), True, 'import numpy as np\n'), ((10665, 10703), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(2, mod.k_exog)'}), '(size=(2, mod.k_exog))\n', (10681, 10703), True, 'import numpy as np\n'), ((10813, 10855), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(1, mod.k_exog + 1)'}), '(size=(1, mod.k_exog + 1))\n', (10829, 10855), True, 'import numpy as np\n'), ((10995, 11020), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (11008, 11020), False, 'import pytest\n'), ((11179, 11204), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(50)'}), '(size=50)\n', (11195, 11204), True, 'import numpy as np\n'), ((15083, 15096), 'numpy.ones', 'np.ones', (['nobs'], {}), '(nobs)\n', (15090, 15096), True, 'import numpy as np\n'), ((16975, 16988), 'numpy.ones', 'np.ones', (['nobs'], {}), '(nobs)\n', (16982, 16988), True, 'import numpy as np\n'), ((17641, 17735), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'level': 
'level', 'seasonal': '(2)', 'stochastic_seasonal': '(True)', 'exog': 'exog'}), '(endog, level=level, seasonal=2, stochastic_seasonal=\n True, exog=exog)\n', (17661, 17735), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((17935, 18004), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'level': 'level', 'exog': 'exog', 'autoregressive': '(1)'}), '(endog, level=level, exog=exog, autoregressive=1)\n', (17955, 18004), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((18211, 18320), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog'], {'level': 'level', 'exog': 'exog', 'cycle': '(True)', 'stochastic_cycle': '(True)', 'damped_cycle': '(True)'}), '(endog, level=level, exog=exog, cycle=True,\n stochastic_cycle=True, damped_cycle=True)\n', (18231, 18320), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((1204, 1229), 'numpy.log', 'np.log', (["values['realgdp']"], {}), "(values['realgdp'])\n", (1210, 1229), True, 'import numpy as np\n'), ((3294, 3306), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3304, 3306), True, 'import matplotlib.pyplot as plt\n'), ((3448, 3484), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3471, 3484), False, 'import warnings\n'), ((7867, 7901), 'pytest.warns', 'pytest.warns', (['warning'], {'match': 'match'}), '(warning, match=match)\n', (7879, 7901), False, 'import pytest\n'), ((7915, 7975), 'statsmodels.tsa.statespace.structural.UnobservedComponents', 'UnobservedComponents', (['endog', '"""deterministic trend"""'], {}), "(endog, 'deterministic trend', **kwargs)\n", (7935, 7975), False, 'from statsmodels.tsa.statespace.structural import UnobservedComponents\n'), ((8408, 8421), 'numpy.ones', 'np.ones', (['nobs'], {}), '(nobs)\n', (8415, 8421), True, 'import numpy as np\n'), ((10717, 10742), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10730, 10742), False, 'import pytest\n'), ((10869, 10894), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10882, 10894), False, 'import pytest\n'), ((11389, 11404), 'pandas.Index', 'pd.Index', (["['a']"], {}), "(['a'])\n", (11397, 11404), True, 'import pandas as pd\n'), ((13085, 13111), 'numpy.array', 'np.array', (['[[1, 1], [0, 1]]'], {}), '([[1, 1], [0, 1]])\n', (13093, 13111), True, 'import numpy as np\n'), ((13139, 13208), 'numpy.array', 'np.array', (['[[0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, -1, 0], [0, 0, 0, -1]]'], {}), '([[0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, -1, 0], [0, 0, 0, -1]])\n', (13147, 13208), True, 'import numpy as np\n'), ((14380, 14400), 'numpy.diag', 'np.diag', (['params[1:3]'], {}), '(params[1:3])\n', (14387, 14400), True, 'import numpy as np\n'), ((15976, 15991), 'numpy.ones', 'np.ones', (['(h, 2)'], {}), '((h, 2))\n', (15983, 15991), True, 'import numpy as np\n'), ((3190, 3222), 'pandas.plotting.register_matplotlib_converters', 'register_matplotlib_converters', ([], {}), '()\n', (3220, 3222), False, 'from pandas.plotting import register_matplotlib_converters\n'), ((3706, 3756), 'numpy.testing.assert_allclose', 'assert_allclose', (['res.llf', "true['llf']"], {'rtol': '(0.0001)'}), "(res.llf, true['llf'], rtol=0.0001)\n", (3721, 3756), False, 'from numpy.testing import assert_equal, assert_allclose, assert_raises\n'), ((8423, 8438), 'numpy.arange', 'np.arange', (['nobs'], {}), '(nobs)\n', (8432, 8438), 
True, 'import numpy as np\n'), ((9247, 9264), 'numpy.arange', 'np.arange', (['(50)', '(60)'], {}), '(50, 60)\n', (9256, 9264), True, 'import numpy as np\n'), ((11073, 11090), 'numpy.zeros', 'np.zeros', (['(10, 4)'], {}), '((10, 4))\n', (11081, 11090), True, 'import numpy as np\n'), ((14410, 14419), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (14416, 14419), True, 'import numpy as np\n'), ((14439, 14448), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (14445, 14448), True, 'import numpy as np\n'), ((19918, 19929), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (19925, 19929), True, 'import numpy as np\n'), ((19975, 19986), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (19982, 19986), True, 'import numpy as np\n'), ((21294, 21305), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (21301, 21305), True, 'import numpy as np\n'), ((21351, 21362), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (21358, 21362), True, 'import numpy as np\n'), ((22784, 22795), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (22791, 22795), True, 'import numpy as np\n'), ((22841, 22852), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (22848, 22852), True, 'import numpy as np\n'), ((13285, 13312), 'numpy.cos', 'np.cos', (['(2 * np.pi * 1 / 9.0)'], {}), '(2 * np.pi * 1 / 9.0)\n', (13291, 13312), True, 'import numpy as np\n'), ((13307, 13334), 'numpy.sin', 'np.sin', (['(2 * np.pi * 1 / 9.0)'], {}), '(2 * np.pi * 1 / 9.0)\n', (13313, 13334), True, 'import numpy as np\n'), ((13384, 13411), 'numpy.cos', 'np.cos', (['(2 * np.pi * 1 / 9.0)'], {}), '(2 * np.pi * 1 / 9.0)\n', (13390, 13411), True, 'import numpy as np\n'), ((13445, 13472), 'numpy.cos', 'np.cos', (['(2 * np.pi * 2 / 9.0)'], {}), '(2 * np.pi * 2 / 9.0)\n', (13451, 13472), True, 'import numpy as np\n'), ((13467, 13494), 'numpy.sin', 'np.sin', (['(2 * np.pi * 2 / 9.0)'], {}), '(2 * np.pi * 2 / 9.0)\n', (13473, 13494), True, 'import numpy as np\n'), ((13544, 13571), 'numpy.cos', 'np.cos', (['(2 * np.pi * 2 / 9.0)'], {}), '(2 * np.pi * 2 / 9.0)\n', (13550, 13571), True, 'import numpy as np\n'), ((13605, 13628), 'numpy.cos', 'np.cos', (['(2 * np.pi / 3.0)'], {}), '(2 * np.pi / 3.0)\n', (13611, 13628), True, 'import numpy as np\n'), ((13625, 13648), 'numpy.sin', 'np.sin', (['(2 * np.pi / 3.0)'], {}), '(2 * np.pi / 3.0)\n', (13631, 13648), True, 'import numpy as np\n'), ((13698, 13721), 'numpy.cos', 'np.cos', (['(2 * np.pi / 3.0)'], {}), '(2 * np.pi / 3.0)\n', (13704, 13721), True, 'import numpy as np\n'), ((13362, 13389), 'numpy.sin', 'np.sin', (['(2 * np.pi * 1 / 9.0)'], {}), '(2 * np.pi * 1 / 9.0)\n', (13368, 13389), True, 'import numpy as np\n'), ((13522, 13549), 'numpy.sin', 'np.sin', (['(2 * np.pi * 2 / 9.0)'], {}), '(2 * np.pi * 2 / 9.0)\n', (13528, 13549), True, 'import numpy as np\n'), ((13678, 13701), 'numpy.sin', 'np.sin', (['(2 * np.pi / 3.0)'], {}), '(2 * np.pi / 3.0)\n', (13684, 13701), True, 'import numpy as np\n'), ((13743, 13767), 'numpy.cos', 'np.cos', (['(2 * np.pi / 30.0)'], {}), '(2 * np.pi / 30.0)\n', (13749, 13767), True, 'import numpy as np\n'), ((13767, 13791), 'numpy.sin', 'np.sin', (['(2 * np.pi / 30.0)'], {}), '(2 * np.pi / 30.0)\n', (13773, 13791), True, 'import numpy as np\n'), ((13811, 13835), 'numpy.sin', 'np.sin', (['(2 * np.pi / 30.0)'], {}), '(2 * np.pi / 30.0)\n', (13817, 13835), True, 'import numpy as np\n'), ((13835, 13859), 'numpy.cos', 'np.cos', (['(2 * np.pi / 30.0)'], {}), '(2 * np.pi / 30.0)\n', (13841, 13859), True, 'import numpy as np\n')]
|
import torch
import numpy as np
from torch.autograd import Variable
import torch.optim as optim
import argparse
import random
import os
import models
import torchvision.utils as vutils
import utils
import nyuDataLoader as dataLoader_nyu
import dataLoader as dataLoader_ours
import torch.nn as nn
from torch.utils.data import DataLoader
import torch.nn.functional as F
import wrapperBRDF as wcg
import wrapperNYU as wnyu
import scipy.io as io
import os.path as osp
parser = argparse.ArgumentParser()
# The location of the training set
parser.add_argument('--dataRoot', default=None, help='path to input images')
parser.add_argument('--NYURoot', default=None, help='path to the NYU dataset')
parser.add_argument('--experimentBRDF', default=None, help='path to the model for BRDF prediction')
parser.add_argument('--experiment', default=None, help='the path to store samples and models')
# The basic training setting
parser.add_argument('--nepochBRDF', type=int, default=14, help='the number of epochs for BRDF prediction')
parser.add_argument('--nepoch', type=int, default=2, help='the number of epochs for training')
parser.add_argument('--batchSize', type=int, default=8, help='input batch size')
parser.add_argument('--imHeight', type=int, default=240, help='the height of the input image to the network')
parser.add_argument('--imWidth', type=int, default=320, help='the width of the input image to the network')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--deviceIds', type=int, nargs='+', default=[0, 1], help='the gpus used for training network')
# The training weight
parser.add_argument('--albedoWeight', type=float, default=0.75, help='the weight for the diffuse component')
parser.add_argument('--normalWeight', type=float, default=0.5, help='the weight for the normal component')
parser.add_argument('--roughWeight', type=float, default=0.25, help='the weight for the roughness component')
parser.add_argument('--depthWeight', type=float, default=0.25, help='the weight for the depth component')
# The training weight on NYU
parser.add_argument('--normalNYUWeight', type=float, default=4.5, help='the weight for the normal component on NYU')
parser.add_argument('--depthNYUWeight', type=float, default=4.5, help='the weight for the depth component on NYU')
# Cascade level
parser.add_argument('--cascadeLevel', type=int, default=0, help='the cascade level')
# The detail network setting
opt = parser.parse_args()
print(opt)
opt.gpuId = opt.deviceIds[0]
torch.multiprocessing.set_sharing_strategy('file_system')
if opt.experiment is None:
opt.experiment = 'check_cascadeNYU%d' % opt.cascadeLevel
os.system('mkdir {0}'.format(opt.experiment) )
os.system('cp *.py %s' % opt.experiment )
if opt.experimentBRDF is None:
opt.experimentBRDF = 'check_cascade0_w%d_h%d' % (opt.imWidth, opt.imHeight )
albeW, normW = opt.albedoWeight, opt.normalWeight
rougW = opt.roughWeight
deptW = opt.depthWeight
normNYUW = opt.normalNYUWeight
depthNYUW = opt.depthNYUWeight
opt.seed = 0
print("Random Seed: ", opt.seed )
random.seed(opt.seed )
torch.manual_seed(opt.seed )
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
####################################
# Initialize the networks
encoder = models.encoder0(cascadeLevel = opt.cascadeLevel )
albedoDecoder = models.decoder0(mode=0 )
normalDecoder = models.decoder0(mode=1 )
roughDecoder = models.decoder0(mode=2 )
depthDecoder = models.decoder0(mode=4 )
####################################################################
#########################################
encoder.load_state_dict( torch.load('{0}/encoder{1}_{2}.pth'.format(opt.experimentBRDF,
opt.cascadeLevel, opt.nepochBRDF-1 ) ).state_dict() )
albedoDecoder.load_state_dict( torch.load('{0}/albedo{1}_{2}.pth'.format(opt.experimentBRDF,
opt.cascadeLevel, opt.nepochBRDF-1 ) ).state_dict() )
normalDecoder.load_state_dict( torch.load('{0}/normal{1}_{2}.pth'.format(opt.experimentBRDF,
opt.cascadeLevel, opt.nepochBRDF-1 ) ).state_dict() )
roughDecoder.load_state_dict( torch.load('{0}/rough{1}_{2}.pth'.format(opt.experimentBRDF,
opt.cascadeLevel, opt.nepochBRDF-1 ) ).state_dict() )
depthDecoder.load_state_dict( torch.load('{0}/depth{1}_{2}.pth'.format(opt.experimentBRDF,
opt.cascadeLevel, opt.nepochBRDF-1 ) ).state_dict() )
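# Scale factor applied to the base learning rate (1e-4) in the optimizers below;
# the BRDF networks start from the pretrained checkpoints loaded above.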
lr_scale = 0.5
#########################################
encoder = nn.DataParallel(encoder, device_ids = opt.deviceIds )
albedoDecoder = nn.DataParallel(albedoDecoder, device_ids = opt.deviceIds )
normalDecoder = nn.DataParallel(normalDecoder, device_ids = opt.deviceIds )
roughDecoder = nn.DataParallel(roughDecoder, device_ids = opt.deviceIds )
depthDecoder = nn.DataParallel(depthDecoder, device_ids = opt.deviceIds )
####################################
# Send things into GPU
if opt.cuda:
encoder = encoder.cuda(opt.gpuId )
albedoDecoder = albedoDecoder.cuda(opt.gpuId )
normalDecoder = normalDecoder.cuda(opt.gpuId )
roughDecoder = roughDecoder.cuda(opt.gpuId )
depthDecoder = depthDecoder.cuda(opt.gpuId )
####################################
####################################
# Optimizer
opEncoder = optim.Adam(encoder.parameters(), lr=1e-4 * lr_scale, betas=(0.5, 0.999) )
opAlbedo = optim.Adam(albedoDecoder.parameters(), lr=1e-4 * lr_scale, betas=(0.5, 0.999) )
opNormal = optim.Adam(normalDecoder.parameters(), lr=1e-4 * lr_scale, betas=(0.5, 0.999) )
opRough = optim.Adam(roughDecoder.parameters(), lr=1e-4 * lr_scale, betas=(0.5, 0.999) )
opDepth = optim.Adam(depthDecoder.parameters(), lr=1e-4 * lr_scale, betas=(0.5, 0.999) )
#####################################
####################################
brdfDataset = dataLoader_ours.BatchLoader( opt.dataRoot, imWidth = opt.imWidth, imHeight = opt.imHeight,
cascadeLevel = 0, isLight = False )
NYUDataset = dataLoader_nyu.NYULoader(
imRoot = osp.join(opt.NYURoot, 'images'),
normalRoot = osp.join(opt.NYURoot, 'normals'),
depthRoot = osp.join(opt.NYURoot, 'depths'),
segRoot = osp.join(opt.NYURoot, 'masks'),
imHeight = opt.imHeight,
imWidth = opt.imWidth,
phase = 'TRAIN' )
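# Pair the synthetic CGBRDF dataset with the NYU dataset so that each training
# iteration yields one batch of each (trainBatch[0] and trainBatch[1] below).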
trainDataset = dataLoader_nyu.ConcatDataset(brdfDataset, NYUDataset)
brdfLoader = DataLoader(trainDataset, batch_size = opt.batchSize, num_workers =
6, shuffle = True)
j = 0
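# j counts the total number of training iterations across all epochs.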
# Running logs of the BRDF losses (row 0 is an initial placeholder)
albedoErrsNpList = np.ones( [1, 1], dtype = np.float32 )
normalErrsNpList = np.ones( [1, 1], dtype = np.float32 )
roughErrsNpList= np.ones( [1, 1], dtype = np.float32 )
depthErrsNpList = np.ones( [1, 1], dtype = np.float32 )
normalNYUErrsNpList = np.ones([1, 1], dtype=np.float32 )
angleNYUErrsNpList = np.ones([1, 1], dtype = np.float32 )
depthNYUErrsNpList = np.ones([1, 1], dtype=np.float32 )
for epoch in list(range(0, opt.nepoch) ):
trainingLog = open('{0}/trainingLog_{1}.txt'.format(opt.experiment, epoch), 'w')
for i, trainBatch in enumerate(brdfLoader):
j += 1
dataBatch = trainBatch[0]
NYUBatch = trainBatch[1]
#####################################################################################################################
############################################# Train with CGBRDF dataset #############################################
#####################################################################################################################
# Clear the gradient in optimizer
opEncoder.zero_grad()
opAlbedo.zero_grad()
opNormal.zero_grad()
opRough.zero_grad()
opDepth.zero_grad()
albedoPair, normalPair, roughPair, depthPair \
= wcg.wrapperBRDF(dataBatch, opt, encoder,
albedoDecoder, normalDecoder, roughDecoder, depthDecoder )
albedoPred, albedoErr = albedoPair[0], albedoPair[1]
normalPred, normalErr = normalPair[0], normalPair[1]
roughPred, roughErr = roughPair[0], roughPair[1]
depthPred, depthErr = depthPair[0], depthPair[1]
# Back propagate the gradients
totalErr = 4 * albeW * albedoErr + normW * normalErr \
+ rougW *roughErr + deptW * depthErr
totalErr.backward()
# Update the network parameter
opEncoder.step()
opAlbedo.step()
opNormal.step()
opRough.step()
opDepth.step()
# Output training error
utils.writeErrToScreen('albedo', [albedoErr], epoch, j)
utils.writeErrToScreen('normal', [normalErr], epoch, j)
utils.writeErrToScreen('rough', [roughErr], epoch, j)
utils.writeErrToScreen('depth', [depthErr], epoch, j)
utils.writeErrToFile('albedo', [albedoErr], trainingLog, epoch, j)
utils.writeErrToFile('normal', [normalErr], trainingLog, epoch, j)
utils.writeErrToFile('rough', [roughErr], trainingLog, epoch, j)
utils.writeErrToFile('depth', [depthErr], trainingLog, epoch, j)
albedoErrsNpList = np.concatenate( [albedoErrsNpList, utils.turnErrorIntoNumpy( [albedoErr] )], axis=0)
normalErrsNpList = np.concatenate( [normalErrsNpList, utils.turnErrorIntoNumpy( [normalErr] )], axis=0)
roughErrsNpList = np.concatenate( [roughErrsNpList, utils.turnErrorIntoNumpy( [roughErr] )], axis=0)
depthErrsNpList = np.concatenate( [depthErrsNpList, utils.turnErrorIntoNumpy( [depthErr] )], axis=0)
if j < 1000:
utils.writeNpErrToScreen('albedoAccu', np.mean(albedoErrsNpList[1:j+1, :], axis=0), epoch, j )
utils.writeNpErrToScreen('normalAccu', np.mean(normalErrsNpList[1:j+1, :], axis=0), epoch, j )
utils.writeNpErrToScreen('roughAccu', np.mean(roughErrsNpList[1:j+1, :], axis=0), epoch, j )
utils.writeNpErrToScreen('depthAccu', np.mean(depthErrsNpList[1:j+1, :], axis=0), epoch, j )
utils.writeNpErrToFile('albedoAccu', np.mean(albedoErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('normalAccu', np.mean(normalErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('roughAccu', np.mean(roughErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('depthAccu', np.mean(depthErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
else:
utils.writeNpErrToScreen('albedoAccu', np.mean(albedoErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('normalAccu', np.mean(normalErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('roughAccu', np.mean(roughErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('depthAccu', np.mean(depthErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToFile('albedoAccu', np.mean(albedoErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('normalAccu', np.mean(normalErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('roughAccu', np.mean(roughErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('depthAccu', np.mean(depthErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
        if j == 1 or j % 2000 == 0:
# Save the predicted results
vutils.save_image( ( (albedoPred ) ** (1.0/2.2) ).data,
'{0}/{1}_albedoPred_{2}.png'.format(opt.experiment, j, 0) )
vutils.save_image( ( 0.5*(normalPred + 1) ).data,
'{0}/{1}_normalPred_{2}.png'.format(opt.experiment, j, 0) )
vutils.save_image( ( 0.5*(roughPred + 1) ).data,
'{0}/{1}_roughPred_{2}.png'.format(opt.experiment, j, 0) )
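            # Save an inverse-depth visualization; the clamp avoids division by zero.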
depthOut = 1 / torch.clamp(depthPred + 1, 1e-6, 10)
vutils.save_image( ( depthOut ).data,
'{0}/{1}_depthPred_{2}.png'.format(opt.experiment, j, 0) )
##############################################################################################################
######################################## Train with NYU dataset ##############################################
##############################################################################################################
# Clear the gradient in optimizer
opEncoder.zero_grad()
opAlbedo.zero_grad()
opNormal.zero_grad()
opRough.zero_grad()
opDepth.zero_grad()
albedoPair, normalPair, roughPair, depthPair \
= wnyu.wrapperNYU(NYUBatch, opt, encoder,
albedoDecoder, normalDecoder, roughDecoder, depthDecoder )
albedoPred = albedoPair[0]
normalPred, normalErr, angleErr = normalPair[0], normalPair[1], normalPair[2]
roughPred = roughPair[0]
depthPred, depthErr = depthPair[0], depthPair[1]
totalErr = normNYUW * normalErr + depthNYUW * depthErr
totalErr.backward()
# Update the network parameter
opEncoder.step()
opAlbedo.step()
opNormal.step()
opRough.step()
opDepth.step()
# Output training error
utils.writeErrToScreen('normalNYU', [normalErr], epoch, j)
utils.writeErrToScreen('angleNYU', [angleErr], epoch, j)
utils.writeErrToScreen('depthNYU', [depthErr], epoch, j)
utils.writeErrToFile('normalNYU', [normalErr], trainingLog, epoch, j)
utils.writeErrToFile('angleNYU', [angleErr], trainingLog, epoch, j)
utils.writeErrToFile('depthNYU', [depthErr], trainingLog, epoch, j)
normalNYUErrsNpList = np.concatenate( [normalNYUErrsNpList, utils.turnErrorIntoNumpy( [normalErr] )], axis=0)
angleNYUErrsNpList = np.concatenate( [angleNYUErrsNpList, utils.turnErrorIntoNumpy( [angleErr] )], axis=0)
depthNYUErrsNpList = np.concatenate( [depthNYUErrsNpList, utils.turnErrorIntoNumpy( [depthErr] )], axis=0)
if j < 1000:
utils.writeNpErrToScreen('normalAccuNYU', np.mean(normalNYUErrsNpList[1:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('angleAccuNYU', np.mean(angleNYUErrsNpList[1:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('depthAccuNYU', np.mean(depthNYUErrsNpList[1:j+1, :], axis=0), epoch, j)
utils.writeNpErrToFile('normalAccuNYU', np.mean(normalNYUErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('angleAccuNYU', np.mean(angleNYUErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('depthAccuNYU', np.mean(depthNYUErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
else:
utils.writeNpErrToScreen('normalAccuNYU', np.mean(normalNYUErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('angleAccuNYU', np.mean(angleNYUErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('depthAccuNYU', np.mean(depthNYUErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToFile('normalAccuNYU', np.mean(normalNYUErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('angleAccuNYU', np.mean(angleNYUErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('depthAccuNYU', np.mean(depthNYUErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
        if j == 1 or j % 500 == 0:
# Save the predicted results
vutils.save_image( ( (albedoPred ) ** (1.0/2.2) ).data,
'{0}/{1}_albedoPredNYU_{2}.png'.format(opt.experiment, j, 0) )
vutils.save_image( ( 0.5*(normalPred + 1) ).data,
'{0}/{1}_normalPredNYU_{2}.png'.format(opt.experiment, j, 0) )
vutils.save_image( ( 0.5*(roughPred + 1) ).data,
'{0}/{1}_roughPredNYU_{2}.png'.format(opt.experiment, j, 0) )
depthOut = 1 / torch.clamp(depthPred + 1, 1e-6, 10)
vutils.save_image( ( depthOut ).data,
'{0}/{1}_depthPredNYU_{2}.png'.format(opt.experiment, j, 0) )
if j % 2000 == 0:
# save the models
torch.save(encoder.module, '{0}/encoder{1}_{2}_{3}.pth'.format(opt.experiment, opt.cascadeLevel, epoch, j) )
torch.save(albedoDecoder.module, '{0}/albedo{1}_{2}_{3}.pth'.format(opt.experiment, opt.cascadeLevel, epoch, j) )
torch.save(normalDecoder.module, '{0}/normal{1}_{2}_{3}.pth'.format(opt.experiment, opt.cascadeLevel, epoch, j) )
torch.save(roughDecoder.module, '{0}/rough{1}_{2}_{3}.pth'.format(opt.experiment, opt.cascadeLevel, epoch, j) )
torch.save(depthDecoder.module, '{0}/depth{1}_{2}_{3}.pth'.format(opt.experiment, opt.cascadeLevel, epoch, j) )
######################################################################################################################
trainingLog.close()
    # Halve the learning rate of every optimizer every 10 epochs
if (epoch + 1) % 10 == 0:
for param_group in opEncoder.param_groups:
param_group['lr'] /= 2
for param_group in opAlbedo.param_groups:
param_group['lr'] /= 2
for param_group in opNormal.param_groups:
param_group['lr'] /= 2
for param_group in opRough.param_groups:
param_group['lr'] /= 2
for param_group in opDepth.param_groups:
param_group['lr'] /= 2
# Save the error record
np.save('{0}/albedoError_{1}.npy'.format(opt.experiment, epoch), albedoErrsNpList )
np.save('{0}/normalError_{1}.npy'.format(opt.experiment, epoch), normalErrsNpList )
np.save('{0}/roughError_{1}.npy'.format(opt.experiment, epoch), roughErrsNpList )
np.save('{0}/depthError_{1}.npy'.format(opt.experiment, epoch), depthErrsNpList )
np.save('{0}/normalNYUError_{1}.npy'.format(opt.experiment, epoch), normalNYUErrsNpList )
np.save('{0}/angleNYUError_{1}.npy'.format(opt.experiment, epoch), angleNYUErrsNpList )
# save the models
torch.save(encoder.module, '{0}/encoder{1}_{2}.pth'.format(opt.experiment, opt.cascadeLevel, epoch) )
torch.save(albedoDecoder.module, '{0}/albedo{1}_{2}.pth'.format(opt.experiment, opt.cascadeLevel, epoch) )
torch.save(normalDecoder.module, '{0}/normal{1}_{2}.pth'.format(opt.experiment, opt.cascadeLevel, epoch) )
torch.save(roughDecoder.module, '{0}/rough{1}_{2}.pth'.format(opt.experiment, opt.cascadeLevel, epoch) )
torch.save(depthDecoder.module, '{0}/depth{1}_{2}.pth'.format(opt.experiment, opt.cascadeLevel, epoch) )
|
[
"argparse.ArgumentParser",
"numpy.ones",
"utils.turnErrorIntoNumpy",
"numpy.mean",
"dataLoader.BatchLoader",
"os.path.join",
"torch.multiprocessing.set_sharing_strategy",
"torch.utils.data.DataLoader",
"nyuDataLoader.ConcatDataset",
"random.seed",
"wrapperBRDF.wrapperBRDF",
"models.encoder0",
"torch.manual_seed",
"os.system",
"torch.clamp",
"torch.cuda.is_available",
"utils.writeErrToScreen",
"utils.writeErrToFile",
"wrapperNYU.wrapperNYU",
"torch.nn.DataParallel",
"models.decoder0"
] |
[((474, 499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (497, 499), False, 'import argparse\n'), ((2503, 2560), 'torch.multiprocessing.set_sharing_strategy', 'torch.multiprocessing.set_sharing_strategy', (['"""file_system"""'], {}), "('file_system')\n", (2545, 2560), False, 'import torch\n'), ((2698, 2738), 'os.system', 'os.system', (["('cp *.py %s' % opt.experiment)"], {}), "('cp *.py %s' % opt.experiment)\n", (2707, 2738), False, 'import os\n'), ((3063, 3084), 'random.seed', 'random.seed', (['opt.seed'], {}), '(opt.seed)\n', (3074, 3084), False, 'import random\n'), ((3086, 3113), 'torch.manual_seed', 'torch.manual_seed', (['opt.seed'], {}), '(opt.seed)\n', (3103, 3113), False, 'import torch\n'), ((3314, 3360), 'models.encoder0', 'models.encoder0', ([], {'cascadeLevel': 'opt.cascadeLevel'}), '(cascadeLevel=opt.cascadeLevel)\n', (3329, 3360), False, 'import models\n'), ((3380, 3403), 'models.decoder0', 'models.decoder0', ([], {'mode': '(0)'}), '(mode=0)\n', (3395, 3403), False, 'import models\n'), ((3421, 3444), 'models.decoder0', 'models.decoder0', ([], {'mode': '(1)'}), '(mode=1)\n', (3436, 3444), False, 'import models\n'), ((3461, 3484), 'models.decoder0', 'models.decoder0', ([], {'mode': '(2)'}), '(mode=2)\n', (3476, 3484), False, 'import models\n'), ((3501, 3524), 'models.decoder0', 'models.decoder0', ([], {'mode': '(4)'}), '(mode=4)\n', (3516, 3524), False, 'import models\n'), ((4494, 4544), 'torch.nn.DataParallel', 'nn.DataParallel', (['encoder'], {'device_ids': 'opt.deviceIds'}), '(encoder, device_ids=opt.deviceIds)\n', (4509, 4544), True, 'import torch.nn as nn\n'), ((4564, 4620), 'torch.nn.DataParallel', 'nn.DataParallel', (['albedoDecoder'], {'device_ids': 'opt.deviceIds'}), '(albedoDecoder, device_ids=opt.deviceIds)\n', (4579, 4620), True, 'import torch.nn as nn\n'), ((4640, 4696), 'torch.nn.DataParallel', 'nn.DataParallel', (['normalDecoder'], {'device_ids': 'opt.deviceIds'}), '(normalDecoder, device_ids=opt.deviceIds)\n', (4655, 4696), True, 'import torch.nn as nn\n'), ((4715, 4770), 'torch.nn.DataParallel', 'nn.DataParallel', (['roughDecoder'], {'device_ids': 'opt.deviceIds'}), '(roughDecoder, device_ids=opt.deviceIds)\n', (4730, 4770), True, 'import torch.nn as nn\n'), ((4789, 4844), 'torch.nn.DataParallel', 'nn.DataParallel', (['depthDecoder'], {'device_ids': 'opt.deviceIds'}), '(depthDecoder, device_ids=opt.deviceIds)\n', (4804, 4844), True, 'import torch.nn as nn\n'), ((5788, 5909), 'dataLoader.BatchLoader', 'dataLoader_ours.BatchLoader', (['opt.dataRoot'], {'imWidth': 'opt.imWidth', 'imHeight': 'opt.imHeight', 'cascadeLevel': '(0)', 'isLight': '(False)'}), '(opt.dataRoot, imWidth=opt.imWidth, imHeight=opt\n .imHeight, cascadeLevel=0, isLight=False)\n', (5815, 5909), True, 'import dataLoader as dataLoader_ours\n'), ((6275, 6328), 'nyuDataLoader.ConcatDataset', 'dataLoader_nyu.ConcatDataset', (['brdfDataset', 'NYUDataset'], {}), '(brdfDataset, NYUDataset)\n', (6303, 6328), True, 'import nyuDataLoader as dataLoader_nyu\n'), ((6342, 6421), 'torch.utils.data.DataLoader', 'DataLoader', (['trainDataset'], {'batch_size': 'opt.batchSize', 'num_workers': '(6)', 'shuffle': '(True)'}), '(trainDataset, batch_size=opt.batchSize, num_workers=6, shuffle=True)\n', (6352, 6421), False, 'from torch.utils.data import DataLoader\n'), ((6473, 6506), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6480, 6506), True, 'import numpy as np\n'), ((6530, 6563), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 
'np.float32'}), '([1, 1], dtype=np.float32)\n', (6537, 6563), True, 'import numpy as np\n'), ((6585, 6618), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6592, 6618), True, 'import numpy as np\n'), ((6641, 6674), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6648, 6674), True, 'import numpy as np\n'), ((6702, 6735), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6709, 6735), True, 'import numpy as np\n'), ((6758, 6791), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6765, 6791), True, 'import numpy as np\n'), ((6816, 6849), 'numpy.ones', 'np.ones', (['[1, 1]'], {'dtype': 'np.float32'}), '([1, 1], dtype=np.float32)\n', (6823, 6849), True, 'import numpy as np\n'), ((3119, 3144), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3142, 3144), False, 'import torch\n'), ((5979, 6010), 'os.path.join', 'osp.join', (['opt.NYURoot', '"""images"""'], {}), "(opt.NYURoot, 'images')\n", (5987, 6010), True, 'import os.path as osp\n'), ((6033, 6065), 'os.path.join', 'osp.join', (['opt.NYURoot', '"""normals"""'], {}), "(opt.NYURoot, 'normals')\n", (6041, 6065), True, 'import os.path as osp\n'), ((6087, 6118), 'os.path.join', 'osp.join', (['opt.NYURoot', '"""depths"""'], {}), "(opt.NYURoot, 'depths')\n", (6095, 6118), True, 'import os.path as osp\n'), ((6138, 6168), 'os.path.join', 'osp.join', (['opt.NYURoot', '"""masks"""'], {}), "(opt.NYURoot, 'masks')\n", (6146, 6168), True, 'import os.path as osp\n'), ((7740, 7842), 'wrapperBRDF.wrapperBRDF', 'wcg.wrapperBRDF', (['dataBatch', 'opt', 'encoder', 'albedoDecoder', 'normalDecoder', 'roughDecoder', 'depthDecoder'], {}), '(dataBatch, opt, encoder, albedoDecoder, normalDecoder,\n roughDecoder, depthDecoder)\n', (7755, 7842), True, 'import wrapperBRDF as wcg\n'), ((8477, 8532), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""albedo"""', '[albedoErr]', 'epoch', 'j'], {}), "('albedo', [albedoErr], epoch, j)\n", (8499, 8532), False, 'import utils\n'), ((8541, 8596), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""normal"""', '[normalErr]', 'epoch', 'j'], {}), "('normal', [normalErr], epoch, j)\n", (8563, 8596), False, 'import utils\n'), ((8605, 8658), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""rough"""', '[roughErr]', 'epoch', 'j'], {}), "('rough', [roughErr], epoch, j)\n", (8627, 8658), False, 'import utils\n'), ((8667, 8720), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""depth"""', '[depthErr]', 'epoch', 'j'], {}), "('depth', [depthErr], epoch, j)\n", (8689, 8720), False, 'import utils\n'), ((8730, 8796), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""albedo"""', '[albedoErr]', 'trainingLog', 'epoch', 'j'], {}), "('albedo', [albedoErr], trainingLog, epoch, j)\n", (8750, 8796), False, 'import utils\n'), ((8805, 8871), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""normal"""', '[normalErr]', 'trainingLog', 'epoch', 'j'], {}), "('normal', [normalErr], trainingLog, epoch, j)\n", (8825, 8871), False, 'import utils\n'), ((8880, 8944), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""rough"""', '[roughErr]', 'trainingLog', 'epoch', 'j'], {}), "('rough', [roughErr], trainingLog, epoch, j)\n", (8900, 8944), False, 'import utils\n'), ((8953, 9017), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""depth"""', '[depthErr]', 'trainingLog', 'epoch', 'j'], {}), "('depth', [depthErr], 
trainingLog, epoch, j)\n", (8973, 9017), False, 'import utils\n'), ((12614, 12715), 'wrapperNYU.wrapperNYU', 'wnyu.wrapperNYU', (['NYUBatch', 'opt', 'encoder', 'albedoDecoder', 'normalDecoder', 'roughDecoder', 'depthDecoder'], {}), '(NYUBatch, opt, encoder, albedoDecoder, normalDecoder,\n roughDecoder, depthDecoder)\n', (12629, 12715), True, 'import wrapperNYU as wnyu\n'), ((13233, 13291), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""normalNYU"""', '[normalErr]', 'epoch', 'j'], {}), "('normalNYU', [normalErr], epoch, j)\n", (13255, 13291), False, 'import utils\n'), ((13300, 13356), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""angleNYU"""', '[angleErr]', 'epoch', 'j'], {}), "('angleNYU', [angleErr], epoch, j)\n", (13322, 13356), False, 'import utils\n'), ((13365, 13421), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""depthNYU"""', '[depthErr]', 'epoch', 'j'], {}), "('depthNYU', [depthErr], epoch, j)\n", (13387, 13421), False, 'import utils\n'), ((13430, 13499), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""normalNYU"""', '[normalErr]', 'trainingLog', 'epoch', 'j'], {}), "('normalNYU', [normalErr], trainingLog, epoch, j)\n", (13450, 13499), False, 'import utils\n'), ((13508, 13575), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""angleNYU"""', '[angleErr]', 'trainingLog', 'epoch', 'j'], {}), "('angleNYU', [angleErr], trainingLog, epoch, j)\n", (13528, 13575), False, 'import utils\n'), ((13584, 13651), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""depthNYU"""', '[depthErr]', 'trainingLog', 'epoch', 'j'], {}), "('depthNYU', [depthErr], trainingLog, epoch, j)\n", (13604, 13651), False, 'import utils\n'), ((9081, 9118), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[albedoErr]'], {}), '([albedoErr])\n', (9105, 9118), False, 'import utils\n'), ((9193, 9230), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[normalErr]'], {}), '([normalErr])\n', (9217, 9230), False, 'import utils\n'), ((9303, 9339), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[roughErr]'], {}), '([roughErr])\n', (9327, 9339), False, 'import utils\n'), ((9412, 9448), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[depthErr]'], {}), '([depthErr])\n', (9436, 9448), False, 'import utils\n'), ((9534, 9579), 'numpy.mean', 'np.mean', (['albedoErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(albedoErrsNpList[1:j + 1, :], axis=0)\n', (9541, 9579), True, 'import numpy as np\n'), ((9641, 9686), 'numpy.mean', 'np.mean', (['normalErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(normalErrsNpList[1:j + 1, :], axis=0)\n', (9648, 9686), True, 'import numpy as np\n'), ((9747, 9791), 'numpy.mean', 'np.mean', (['roughErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(roughErrsNpList[1:j + 1, :], axis=0)\n', (9754, 9791), True, 'import numpy as np\n'), ((9852, 9896), 'numpy.mean', 'np.mean', (['depthErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(depthErrsNpList[1:j + 1, :], axis=0)\n', (9859, 9896), True, 'import numpy as np\n'), ((9957, 10002), 'numpy.mean', 'np.mean', (['albedoErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(albedoErrsNpList[1:j + 1, :], axis=0)\n', (9964, 10002), True, 'import numpy as np\n'), ((10074, 10119), 'numpy.mean', 'np.mean', (['normalErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(normalErrsNpList[1:j + 1, :], axis=0)\n', (10081, 10119), True, 'import numpy as np\n'), ((10190, 10234), 'numpy.mean', 'np.mean', (['roughErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(roughErrsNpList[1:j + 1, :], axis=0)\n', (10197, 10234), True, 'import 
numpy as np\n'), ((10305, 10349), 'numpy.mean', 'np.mean', (['depthErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(depthErrsNpList[1:j + 1, :], axis=0)\n', (10312, 10349), True, 'import numpy as np\n'), ((10437, 10488), 'numpy.mean', 'np.mean', (['albedoErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(albedoErrsNpList[j - 999:j + 1, :], axis=0)\n', (10444, 10488), True, 'import numpy as np\n'), ((10547, 10598), 'numpy.mean', 'np.mean', (['normalErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(normalErrsNpList[j - 999:j + 1, :], axis=0)\n', (10554, 10598), True, 'import numpy as np\n'), ((10656, 10706), 'numpy.mean', 'np.mean', (['roughErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(roughErrsNpList[j - 999:j + 1, :], axis=0)\n', (10663, 10706), True, 'import numpy as np\n'), ((10764, 10814), 'numpy.mean', 'np.mean', (['depthErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(depthErrsNpList[j - 999:j + 1, :], axis=0)\n', (10771, 10814), True, 'import numpy as np\n'), ((10872, 10923), 'numpy.mean', 'np.mean', (['albedoErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(albedoErrsNpList[j - 999:j + 1, :], axis=0)\n', (10879, 10923), True, 'import numpy as np\n'), ((10993, 11044), 'numpy.mean', 'np.mean', (['normalErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(normalErrsNpList[j - 999:j + 1, :], axis=0)\n', (11000, 11044), True, 'import numpy as np\n'), ((11113, 11163), 'numpy.mean', 'np.mean', (['roughErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(roughErrsNpList[j - 999:j + 1, :], axis=0)\n', (11120, 11163), True, 'import numpy as np\n'), ((11232, 11282), 'numpy.mean', 'np.mean', (['depthErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(depthErrsNpList[j - 999:j + 1, :], axis=0)\n', (11239, 11282), True, 'import numpy as np\n'), ((11837, 11874), 'torch.clamp', 'torch.clamp', (['(depthPred + 1)', '(1e-06)', '(10)'], {}), '(depthPred + 1, 1e-06, 10)\n', (11848, 11874), False, 'import torch\n'), ((13720, 13757), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[normalErr]'], {}), '([normalErr])\n', (13744, 13757), False, 'import utils\n'), ((13836, 13872), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[angleErr]'], {}), '([angleErr])\n', (13860, 13872), False, 'import utils\n'), ((13951, 13987), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['[depthErr]'], {}), '([depthErr])\n', (13975, 13987), False, 'import utils\n'), ((14076, 14124), 'numpy.mean', 'np.mean', (['normalNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(normalNYUErrsNpList[1:j + 1, :], axis=0)\n', (14083, 14124), True, 'import numpy as np\n'), ((14187, 14234), 'numpy.mean', 'np.mean', (['angleNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(angleNYUErrsNpList[1:j + 1, :], axis=0)\n', (14194, 14234), True, 'import numpy as np\n'), ((14297, 14344), 'numpy.mean', 'np.mean', (['depthNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(depthNYUErrsNpList[1:j + 1, :], axis=0)\n', (14304, 14344), True, 'import numpy as np\n'), ((14407, 14455), 'numpy.mean', 'np.mean', (['normalNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(normalNYUErrsNpList[1:j + 1, :], axis=0)\n', (14414, 14455), True, 'import numpy as np\n'), ((14529, 14576), 'numpy.mean', 'np.mean', (['angleNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(angleNYUErrsNpList[1:j + 1, :], axis=0)\n', (14536, 14576), True, 'import numpy as np\n'), ((14650, 14697), 'numpy.mean', 'np.mean', (['depthNYUErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(depthNYUErrsNpList[1:j + 1, :], axis=0)\n', (14657, 14697), True, 'import numpy as np\n'), 
((14788, 14842), 'numpy.mean', 'np.mean', (['normalNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(normalNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (14795, 14842), True, 'import numpy as np\n'), ((14903, 14956), 'numpy.mean', 'np.mean', (['angleNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(angleNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (14910, 14956), True, 'import numpy as np\n'), ((15017, 15070), 'numpy.mean', 'np.mean', (['depthNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(depthNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (15024, 15070), True, 'import numpy as np\n'), ((15131, 15185), 'numpy.mean', 'np.mean', (['normalNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(normalNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (15138, 15185), True, 'import numpy as np\n'), ((15257, 15310), 'numpy.mean', 'np.mean', (['angleNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(angleNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (15264, 15310), True, 'import numpy as np\n'), ((15382, 15435), 'numpy.mean', 'np.mean', (['depthNYUErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(depthNYUErrsNpList[j - 999:j + 1, :], axis=0)\n', (15389, 15435), True, 'import numpy as np\n'), ((16001, 16038), 'torch.clamp', 'torch.clamp', (['(depthPred + 1)', '(1e-06)', '(10)'], {}), '(depthPred + 1, 1e-06, 10)\n', (16012, 16038), False, 'import torch\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pickle
import numpy as np
from astropy.time import Time
class TestPickle:
"""Basic pickle test of time"""
def test_pickle(self):
times = ['1999-01-01 00:00:00.123456789', '2010-01-01 00:00:00']
t1 = Time(times, scale='utc')
for prot in range(pickle.HIGHEST_PROTOCOL):
t1d = pickle.dumps(t1, prot)
t1l = pickle.loads(t1d)
assert np.all(t1l == t1)
t2 = Time('2012-06-30 12:00:00', scale='utc')
for prot in range(pickle.HIGHEST_PROTOCOL):
t2d = pickle.dumps(t2, prot)
t2l = pickle.loads(t2d)
assert t2l == t2
|
[
"pickle.loads",
"astropy.time.Time",
"numpy.all",
"pickle.dumps"
] |
[((300, 324), 'astropy.time.Time', 'Time', (['times'], {'scale': '"""utc"""'}), "(times, scale='utc')\n", (304, 324), False, 'from astropy.time import Time\n'), ((506, 546), 'astropy.time.Time', 'Time', (['"""2012-06-30 12:00:00"""'], {'scale': '"""utc"""'}), "('2012-06-30 12:00:00', scale='utc')\n", (510, 546), False, 'from astropy.time import Time\n'), ((396, 418), 'pickle.dumps', 'pickle.dumps', (['t1', 'prot'], {}), '(t1, prot)\n', (408, 418), False, 'import pickle\n'), ((437, 454), 'pickle.loads', 'pickle.loads', (['t1d'], {}), '(t1d)\n', (449, 454), False, 'import pickle\n'), ((474, 491), 'numpy.all', 'np.all', (['(t1l == t1)'], {}), '(t1l == t1)\n', (480, 491), True, 'import numpy as np\n'), ((618, 640), 'pickle.dumps', 'pickle.dumps', (['t2', 'prot'], {}), '(t2, prot)\n', (630, 640), False, 'import pickle\n'), ((659, 676), 'pickle.loads', 'pickle.loads', (['t2d'], {}), '(t2d)\n', (671, 676), False, 'import pickle\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
import unittest
import numpy as np
from cclib.bridge import cclib2pyscf
from cclib.parser.utils import find_package
class PyscfTest(unittest.TestCase):
"""Tests for the cclib2pyscf bridge in cclib."""
def setUp(self):
super(PyscfTest, self).setUp()
if not find_package('pyscf'):
raise ImportError('Must install pyscf to run this test')
def test_makepyscf(self):
import pyscf
from pyscf import scf
atomnos = np.array([1, 8, 1], "i")
atomcoords = np.array([[-1, 1, 0], [0, 0, 0], [1, 1, 0]], "f")
pyscfmol = cclib2pyscf.makepyscf(atomcoords, atomnos)
pyscfmol.basis = "6-31G**"
pyscfmol.cart = True
pyscfmol.verbose = 0
pyscfmol.build()
mhf = pyscfmol.HF(conv_tol=1e-6)
en = mhf.kernel()
ref = -75.824754602
assert abs(en - ref) < 1.0e-6
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"cclib.bridge.cclib2pyscf.makepyscf",
"numpy.array",
"cclib.parser.utils.find_package"
] |
[((1127, 1142), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1140, 1142), False, 'import unittest\n'), ((684, 708), 'numpy.array', 'np.array', (['[1, 8, 1]', '"""i"""'], {}), "([1, 8, 1], 'i')\n", (692, 708), True, 'import numpy as np\n'), ((730, 779), 'numpy.array', 'np.array', (['[[-1, 1, 0], [0, 0, 0], [1, 1, 0]]', '"""f"""'], {}), "([[-1, 1, 0], [0, 0, 0], [1, 1, 0]], 'f')\n", (738, 779), True, 'import numpy as np\n'), ((799, 841), 'cclib.bridge.cclib2pyscf.makepyscf', 'cclib2pyscf.makepyscf', (['atomcoords', 'atomnos'], {}), '(atomcoords, atomnos)\n', (820, 841), False, 'from cclib.bridge import cclib2pyscf\n'), ((491, 512), 'cclib.parser.utils.find_package', 'find_package', (['"""pyscf"""'], {}), "('pyscf')\n", (503, 512), False, 'from cclib.parser.utils import find_package\n')]
|
"""DimensionHelper module
Contains a bunch of helper classes mostly used in the array.ScaledArray type.
The goal of the ScaledArray type is to be able to access a data array using
floating point indexes instead of integer indexes.
The idea behind this is that the data array represents a section of space.
For example, a MesoNH data cube represents a cube of atmosphere: it has a size
in number of lines and columns, and each index also maps to a physical position
in space (see the usage sketch at the end of this module).
"""
import numpy as np
from scipy.interpolate import griddata
from scipy.interpolate import interp1d
from nephelae.types import Bounds
class AffineTransform:
def __init__(self, alpha, beta):
self.alpha = alpha
self.beta = beta
def __call__(self, x):
return self.alpha * x + self.beta
class UnitsIndexConverter:
"""UnitsIndexConverter
    Base class to transform index-based keys into unit-based keys
    and vice-versa.
    /!\ This is an abstract class. Concrete child classes must provide
    toUnit(key) and toIndex(key).
"""
def __init__(self, dimSize):
self.dimSize = dimSize
def to_unit(self, key):
if isinstance(key, (int, float)):
return float(self.toUnit(key))
elif type(key) == slice:
if key.start is None:
key_start = self.to_unit(0)
else:
key_start = self.to_unit(key.start)
if key.stop is None:
key_stop = self.to_unit(self.dimSize - 1) # -1 because python slice...
else:
key_stop = self.to_unit(key.stop - 1) # -1 because python slice...
return slice(key_start, key_stop, None)
else:
raise ValueError("key must be a slice or a numeric type.")
def to_index(self, key):
if isinstance(key, (int, float)):
return int(self.toIndex(key) + 0.5) # rounding to closest integer
elif type(key) == slice:
if key.start is None:
key_start = 0
else:
key_start = int(self.to_index(key.start))
if key.stop is None:
key_stop = self.dimSize
else:
key_stop = self.to_index(key.stop) + 1 # +1 because python slice...
return slice(key_start, key_stop, None)
else:
raise ValueError("key must be a slice or a numeric type.")
def linear_interpolation_indexes(self, key):
"""
If key is a scalar, returns two pairs (key, weight) which are
to be used to compute a weighted sum of two elements in an array,
effectively computing a linear interpolation.
If key is a slice, returns a single pair (key, weight), with
the key being self.to_index(key) and the weight being 1.0
        (no interpolation when getting a non-scalar subset of a dimension).
        /!\ returned keys must be wrapped inside tuples to be able to concatenate keys
cleanly.
"""
if isinstance(key, slice):
            return [{'key':(self.to_index(key),), 'weight':1.0}]
elif isinstance(key, (int, float)):
lowIndex = int(self.toIndex(key))
highIndex = lowIndex + 1
try:
lowUnit = self.to_unit(lowIndex)
highUnit = self.to_unit(highIndex)
lmbd = (key - lowUnit) / (highUnit - lowUnit)
return [{'key':(lowIndex,), 'weight': 1.0-lmbd},
{'key':(highIndex,), 'weight': lmbd}]
except:
return [{'key':(lowIndex,), 'weight': 1.0}]
else:
raise ValueError("key must be a slice or a numeric type.")
def bounds(self):
maxSlice = self.to_unit(slice(None,None,None))
return Bounds(maxSlice.start, maxSlice.stop)
def span(self):
bounds = self.bounds()
return bounds[-1] - bounds[0]
class AffineDimension(UnitsIndexConverter):
"""
AffineDimension : maps input 1D indexes to output 1D scale through
affine transformation.
"""
def __init__(self, dimSpan, dimSize):
super().__init__(dimSize)
self.toUnit = AffineTransform((dimSpan[-1] - dimSpan[0]) / (self.dimSize - 1), dimSpan[0])
self.toIndex = AffineTransform((self.dimSize - 1) / (dimSpan[-1] - dimSpan[0]),
-dimSpan[0]*(self.dimSize - 1) / (dimSpan[-1] - dimSpan[0]))
def subdimension(self, key):
"""Build a new AffineDimension which is a subset of self
        Returns None if key is not a slice, or if the slice selects a single
        element. Otherwise returns a new AffineDimension.
"""
index = self.to_index(key)
if isinstance(index, int):
return None
# here index is a slice
if index.stop - index.start <= 1:
            # Here key represents a single element
return None
units = self.to_unit(index) # recompute units for clean borders
return AffineDimension([units.start, units.stop], index.stop - index.start)
class LookupTableDimension(UnitsIndexConverter):
"""
LookupTableDimension : maps input 1D indexes to output 1D scale through an array
defining a stricly monotonous function.
"""
def __init__(self, inputToOutput):
super().__init__(len(inputToOutput))
x_in = np.linspace(0, self.dimSize-1, self.dimSize)
self.toUnit = interp1d(x_in, np.array(inputToOutput))
self.toIndex = interp1d(np.array(inputToOutput), x_in)
def subdimension(self, key):
"""Build a new LookupTableDimension which is a subset of self
        Returns None if key is not a slice, or if the slice selects a single
        element. Otherwise returns a new LookupTableDimension.
"""
index = self.to_index(key)
if isinstance(index, int):
return None
# here index is a slice
if index.stop - index.start <= 1:
            # Here key represents a single element
return None
return LookupTableDimension(self.toUnit.y[index])
class DimensionHelper:
"""DimensionHelper
Helper class to convert a tuple of indexes or units to
their units or indexes counterpart. To be used in ScaledArray
"""
def __init__(self):
self.dims = []
def add_dimension(self, params, typ='linear', dimLen=None):
if typ == 'linear':
if dimLen is None:
dimLen = len(params)
self.dims.append(AffineDimension([params[0], params[-1]], dimLen))
elif typ == 'LUT':
self.dims.append(LookupTableDimension(params))
elif typ == 'empty':
return
else:
raise ValueError("Invalid dimension type '" + typ + "'")
def to_unit(self, keys):
if len(keys) != len(self.dims):
raise ValueError("Number or keys must be equal to number of " +
"Dimension (" + str(len(keys)) + "/" +
str(len(self.dims)) + ")")
res = []
for key, dim in zip(keys, self.dims):
res.append(dim.to_unit(key))
return tuple(res)
def to_index(self, keys):
if len(keys) != len(self.dims):
raise ValueError("Number or keys must be equal to number of " +
"Dimension (" + str(len(keys)) + "/" +
str(len(self.dims)) + ")")
res = []
for key, dim in zip(keys, self.dims):
res.append(dim.to_index(key))
return tuple(res)
def subarray_dimensions(self, keys):
"""Compute the new DimensionHelper object associated to the subarray
corresponding to the keys.
"""
if len(keys) != len(self.dims):
raise ValueError("Number of keys must be equal to the number of" +
" dimensions. (Got " + str(len(keys)) + "/"
+ str(len(self.dims)) + ")")
newDims = DimensionHelper()
for key, dim in zip(keys, self.dims):
newDim = dim.subdimension(key)
if newDim is not None:
newDims.dims.append(newDim)
return newDims
def linear_interpolation_keys(self, keys):
""" Returns a list of pairs of keys and weights to compute a linear
interpolation. The interpolation computation should read in the
main array using generated keys and compute a weighted sum of the
resulting subrrays using the associated weights.
"""
if len(keys) != len(self.dims):
raise ValueError("Number of keys must be equal to the number of" +
" dimensions. (Got " + str(len(keys)) + "/"
+ str(len(self.dims)) + ")")
weightedKeys = []
for key, dim in zip(keys, self.dims):
weightedKeys.append(dim.linear_interpolation_indexes(key))
while len(weightedKeys) > 1:
newKeys = []
for key1 in weightedKeys[-2]:
for key2 in weightedKeys[-1]:
newKeys.append({'key':key1['key'] + key2['key'],
'weight':key1['weight']*key2['weight']})
weightedKeys.pop(-1)
weightedKeys[-1] = newKeys
return weightedKeys[0]
def bounds(self):
return [dim.bounds() for dim in self.dims]
def span(self):
return [dim.span() for dim in self.dims]
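# --- Editor's usage sketch (not part of the original module) ---
# A minimal, hedged example of the floating point indexing described in the
# module docstring. It only uses the classes defined above; running it still
# requires the nephelae package imported at the top of this file.
if __name__ == '__main__':
    dims = DimensionHelper()
    # map integer indexes 0..10 onto the physical range 0.0..100.0
    dims.add_dimension([0.0, 100.0], typ='linear', dimLen=11)
    print(dims.to_index((25.0,)))        # (3,) : closest integer index
    print(dims.to_unit((slice(None),)))  # (slice(0.0, 100.0, None),)
    # weighted index keys for a linear interpolation at 25.0
    print(dims.linear_interpolation_keys((25.0,)))  # [{'key': (2,), 'weight': 0.5}, {'key': (3,), 'weight': 0.5}]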
|
[
"numpy.array",
"nephelae.types.Bounds",
"numpy.linspace"
] |
[((3885, 3922), 'nephelae.types.Bounds', 'Bounds', (['maxSlice.start', 'maxSlice.stop'], {}), '(maxSlice.start, maxSlice.stop)\n', (3891, 3922), False, 'from nephelae.types import Bounds\n'), ((5525, 5571), 'numpy.linspace', 'np.linspace', (['(0)', '(self.dimSize - 1)', 'self.dimSize'], {}), '(0, self.dimSize - 1, self.dimSize)\n', (5536, 5571), True, 'import numpy as np\n'), ((5609, 5632), 'numpy.array', 'np.array', (['inputToOutput'], {}), '(inputToOutput)\n', (5617, 5632), True, 'import numpy as np\n'), ((5666, 5689), 'numpy.array', 'np.array', (['inputToOutput'], {}), '(inputToOutput)\n', (5674, 5689), True, 'import numpy as np\n')]
|
import numpy as np
import scipy.sparse as sp
def calc_A_norm_hat(edge_index, weights=None):
if weights is None:
weights = np.ones(edge_index.shape[1])
sparse_adj = sp.coo_matrix((weights, (edge_index[0], edge_index[1])))
nnodes = sparse_adj.shape[0]
A = sparse_adj + sp.eye(nnodes)
D_vec = np.sum(A, axis=1).A1
D_vec_invsqrt_corr = 1 / np.sqrt(D_vec)
D_invsqrt_corr = sp.diags(D_vec_invsqrt_corr)
return D_invsqrt_corr @ A @ D_invsqrt_corr
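# --- Editor's usage sketch (not part of the original module) ---
# Assumes edge_index follows the (2, num_edges) convention used above, with both
# directions of each undirected edge listed explicitly.
if __name__ == '__main__':
    # 3-node path graph: edges 0-1 and 1-2
    edge_index = np.array([[0, 1, 1, 2],
                           [1, 0, 2, 1]])
    # symmetrically normalized adjacency with self-loops: D^-1/2 (A + I) D^-1/2
    A_norm_hat = calc_A_norm_hat(edge_index)
    print(A_norm_hat.toarray())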
|
[
"scipy.sparse.diags",
"numpy.sum",
"numpy.ones",
"scipy.sparse.coo_matrix",
"scipy.sparse.eye",
"numpy.sqrt"
] |
[((181, 237), 'scipy.sparse.coo_matrix', 'sp.coo_matrix', (['(weights, (edge_index[0], edge_index[1]))'], {}), '((weights, (edge_index[0], edge_index[1])))\n', (194, 237), True, 'import scipy.sparse as sp\n'), ((405, 433), 'scipy.sparse.diags', 'sp.diags', (['D_vec_invsqrt_corr'], {}), '(D_vec_invsqrt_corr)\n', (413, 433), True, 'import scipy.sparse as sp\n'), ((135, 163), 'numpy.ones', 'np.ones', (['edge_index.shape[1]'], {}), '(edge_index.shape[1])\n', (142, 163), True, 'import numpy as np\n'), ((292, 306), 'scipy.sparse.eye', 'sp.eye', (['nnodes'], {}), '(nnodes)\n', (298, 306), True, 'import scipy.sparse as sp\n'), ((319, 336), 'numpy.sum', 'np.sum', (['A'], {'axis': '(1)'}), '(A, axis=1)\n', (325, 336), True, 'import numpy as np\n'), ((369, 383), 'numpy.sqrt', 'np.sqrt', (['D_vec'], {}), '(D_vec)\n', (376, 383), True, 'import numpy as np\n')]
|
import cv2
import numpy as np
import imutils
from ar_overlay_2d import AR2D
# read in images to be used for query and overlay
query_image = cv2.imread('images/crossword_query.png')
ar_image = cv2.imread('images/smash_box_art.png')
# init ar 2d overlay
ar2d = AR2D(query_image, ar_image, min_match_count=200)
# open webcam
camera = cv2.VideoCapture(0)
# loop over the frames of the video
while True:
# grab the current frame
grabbed, frame = camera.read()
frame = imutils.resize(frame, width=1000)
frame_clone = frame.copy()
# break if frame not grabbed
if not grabbed:
break
try:
# apply 2d ar overlay
ar_frame = ar2d.ar_2d_overlay(frame)
except cv2.error:
ar_frame = frame_clone
frame_clone = imutils.resize(frame_clone, width=500)
ar_frame = imutils.resize(ar_frame, width=500)
comparison = np.vstack((frame_clone, ar_frame))
# display results
cv2.imshow('AR 2D (press Q to quit)', comparison)
key = cv2.waitKey(1) & 0xFF
if key == ord('q'):
break
camera.release()
cv2.destroyAllWindows()
|
[
"cv2.waitKey",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.imread",
"ar_overlay_2d.AR2D",
"imutils.resize",
"cv2.destroyAllWindows",
"numpy.vstack"
] |
[((141, 181), 'cv2.imread', 'cv2.imread', (['"""images/crossword_query.png"""'], {}), "('images/crossword_query.png')\n", (151, 181), False, 'import cv2\n'), ((193, 231), 'cv2.imread', 'cv2.imread', (['"""images/smash_box_art.png"""'], {}), "('images/smash_box_art.png')\n", (203, 231), False, 'import cv2\n'), ((261, 309), 'ar_overlay_2d.AR2D', 'AR2D', (['query_image', 'ar_image'], {'min_match_count': '(200)'}), '(query_image, ar_image, min_match_count=200)\n', (265, 309), False, 'from ar_overlay_2d import AR2D\n'), ((334, 353), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (350, 353), False, 'import cv2\n'), ((1079, 1102), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1100, 1102), False, 'import cv2\n'), ((480, 513), 'imutils.resize', 'imutils.resize', (['frame'], {'width': '(1000)'}), '(frame, width=1000)\n', (494, 513), False, 'import imutils\n'), ((770, 808), 'imutils.resize', 'imutils.resize', (['frame_clone'], {'width': '(500)'}), '(frame_clone, width=500)\n', (784, 808), False, 'import imutils\n'), ((824, 859), 'imutils.resize', 'imutils.resize', (['ar_frame'], {'width': '(500)'}), '(ar_frame, width=500)\n', (838, 859), False, 'import imutils\n'), ((878, 912), 'numpy.vstack', 'np.vstack', (['(frame_clone, ar_frame)'], {}), '((frame_clone, ar_frame))\n', (887, 912), True, 'import numpy as np\n'), ((940, 989), 'cv2.imshow', 'cv2.imshow', (['"""AR 2D (press Q to quit)"""', 'comparison'], {}), "('AR 2D (press Q to quit)', comparison)\n", (950, 989), False, 'import cv2\n'), ((1000, 1014), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1011, 1014), False, 'import cv2\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
mpl.rcParams['font.sans-serif'] = ['SimHei']  # specify the default font
mpl.rcParams['axes.unicode_minus'] = False  # fix the minus sign '-' being rendered as a box in saved figures
np.random.seed(19260817)
def d2():
    # sample points along the x axis
    x = np.linspace(0, 5, 100)
    # the data are generated from the curve below plus noise, so y is the model to fit
    y = 2 * np.sin(x) + 0.3 * x**2
    y_data = y + np.random.normal(scale=0.3, size=100)
    # give the figure a name
    plt.figure('data')
    # '.' draws a scatter plot where every point is a round dot
    plt.plot(x, y_data, '.')
    # plot the model; plot() draws a line plot by default
    plt.figure('model')
    plt.plot(x, y)
    # draw data and model in the same figure
    plt.figure('data & model')
    # 'k' sets the line color, lw sets the line width
    # besides the color, the third argument can also set the line style, e.g. 'r--' is a red dashed line
    # more options are documented at http://matplotlib.org/api/pyplot_api.html
    plt.plot(x, y, 'k', lw=2)
    # scatter() is a more direct way to draw a scatter plot
    plt.scatter(x, y_data)
    # save the current figure
    plt.savefig('./data/result.png')
    # show the figures
    plt.show()
def histogram():
mpl.rcParams['axes.titlesize'] = 20
mpl.rcParams['xtick.labelsize'] = 16
mpl.rcParams['ytick.labelsize'] = 16
mpl.rcParams['axes.labelsize'] = 16
mpl.rcParams['xtick.major.size'] = 0
mpl.rcParams['ytick.major.size'] = 0
    # maximum running speeds of dog, cat and cheetah, with the color used to plot each one
    speed_map = {
        'dog': (48, '#7199cf'),
        'cat': (45, '#4fc4aa'),
        'cheetah': (120, '#e1a7a2')
    }
    fig = plt.figure("Bar chart & Pie chart")
    # add a subplot to the figure; 121 means the first subplot of a grid with 1 row and 2 columns
    ax = fig.add_subplot(121)
    ax.set_title('Running speed - bar chart')
    # positions of the elements on the x axis
    xticks = np.arange(3)
    # width of the bars
    bar_width = 0.5
    # animal names
    animals = speed_map.keys()
    # speeds
    speeds = [x[0] for x in speed_map.values()]
    # colors
    colors = [x[1] for x in speed_map.values()]
    # draw the bar chart: x is the label position, y is the speed; set the bar width and a transparent edge
    # xticks + bar_width / 2 centers each bar on its tick
    bars = ax.bar(xticks + bar_width / 2, speeds, width=bar_width, edgecolor='none')
    # y-axis label
    ax.set_ylabel('Speed(km/h)')
    # place each x-axis label at the center of its bar
    ax.set_xticks(xticks + bar_width / 2)
    # set the label names
    ax.set_xticklabels(animals)
    # set the x-axis range
    ax.set_xlim([bar_width / 2 - 0.5, 3 - bar_width / 2])
    # set the y-axis range
    ax.set_ylim([0, 125])
    # give each bar its assigned color
    for bar, color in zip(bars, colors):
        bar.set_color(color)
    # add a new subplot at position 122
    ax = fig.add_subplot(122)
    ax.set_title('Running speed - pie chart')
    # build labels that contain both the name and the speed
    labels = ['{}\n{} km/h'.format(animal, speed) for animal, speed in zip(animals, speeds)]
    # draw the pie chart with the labels and matching colors
ax.pie(speeds, labels=labels, colors=colors)
plt.show()
def d3():
    n_grids = 51  # number of grid points in the x-y plane
    c = int(n_grids / 2)  # center position
    nf = 2  # number of low-frequency components
    # build the grid
    x = np.linspace(0, 1, n_grids)
    y = np.linspace(0, 1, n_grids)
    # x and y are arrays of length n_grids
    # meshgrid combines them into n_grids*n_grids arrays; X and Y hold the coordinates of every grid point
    X, Y = np.meshgrid(x, y)
    # start from an all-zero Fourier spectrum
    spectrum = np.zeros((n_grids, n_grids), dtype=np.complex)
    # generate some noise of length (2*nf+1)**2/2
    t = int((2*nf+1)**2/2)
    noise = [np.complex(x, y) for x, y in np.random.uniform(-1.0,1.0,(t, 2))]
    # each entry of the Fourier spectrum and its conjugate are symmetric about the center
    noisy_block = np.concatenate((noise, [0j], np.conjugate(noise[::-1])))
    # use the generated spectrum as the low-frequency components
    spectrum[c-nf:c+nf+1, c-nf:c+nf+1] = noisy_block.reshape((2*nf+1, 2*nf+1))
    # inverse Fourier transform
    Z = np.real(np.fft.ifft2(np.fft.ifftshift(spectrum)))
    # create the figure
    fig = plt.figure('3D surface & wire')
    # first subplot: surface plot
    ax = fig.add_subplot(1, 2, 1, projection='3d')
    # alpha sets the transparency, cmap is the color map
    # rstride and cstride are the sampling steps in the two directions (smaller is finer), lw is the line width
    ax.plot_surface(X, Y, Z, alpha=0.7, cmap='jet', rstride=1, cstride=1, lw=0)
    # second subplot: wireframe plot
ax = fig.add_subplot(1, 2, 2, projection='3d')
ax.plot_wireframe(X, Y, Z, rstride=3, cstride=3, lw=0.5)
plt.show()
def dynamic():
"""
动态图
"""
fig,ax=plt.subplots()
y1=[]
for i in range(50):
y1.append(i)
ax.cla()
ax.bar(y1,label='test',height=y1,width=0.3)
ax.legend()
plt.pause(0.2)
def draw_normal():
    # draw ordinary line plots
x = np.linspace(-1, 1, 50)
y1 = 2 * x + 1
y2 = x**2
plt.figure()
    # keep the line handles when plotting (used for the legend below); the trailing comma is required
l1, = plt.plot(x, y1)
l2, = plt.plot(x, y2, color='red',linewidth=1.0, linestyle='--')
plt.xlabel('x axis')
plt.ylabel('y axis')
    # configure the legend
plt.legend(handles=[l1, l2, ], labels=['a', 'b'], loc='upper left')
plt.show()
def linaer():
# data = np.arange(100, 201)
# plt.plot(data)
# plt.show()
# data = np.arange(100, 201)
# plt.plot(data)
# data2 = np.arange(200, 301)
# plt.figure()
# plt.plot(data2)
# plt.show()
    # the first array passed to plot() gives the x-axis values and the second the y-axis values,
    # so one of them is a straight line and the other a polyline;
# data = np.arange(100, 201)
# plt.subplot(2, 1, 1)
# plt.plot(data)
# data2 = np.arange(200, 301)
# plt.subplot(2, 1, 2)
# plt.plot(data2)
# plt.show()
    # c is the point color, s is the point size, alpha is the transparency
N = 20
plt.scatter(np.random.rand(N) * 100, np.random.rand(N)* 100, c='r', s=100, alpha=0.5)
plt.scatter(np.random.rand(N) * 100, np.random.rand(N)* 100, c='g', s=200, alpha=0.5)
plt.scatter(np.random.rand(N) * 100, np.random.rand(N)* 100, c='b', s=300, alpha=0.5)
plt.show()
def pie():
"""
饼图 显示两个图
"""
labels = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
data = np.random.rand(7)*100
    # plt.subplot(2, 1, 1)  # the 1st subplot in a grid of 2 rows and 1 column
plt.subplot(1, 2, 1)
plt.pie(data, labels=labels, autopct='%1.1f%%')
plt.axis("equal")
plt.legend()
data1 = np.random.rand(7)*100
plt.subplot(1, 2, 2)
    # autopct sets the number format of the percentage labels
plt.pie(data1, labels=labels, autopct='%1.1f%%')
plt.axis("equal")
plt.legend()
plt.show()
def bar():
"""
条形图
"""
    # np.random.rand(N * 3).reshape(N, -1) first generates 21 (N x 3) random numbers,
    # then arranges them into 7 rows of three numbers each, matching the three color components.
N = 7
x = np.arange(N)
data = np.random.randint(low=0, high=100, size=N)
colors = np.random.rand(N * 3).reshape(N, -1)
labels = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
plt.title("Weekday Data")
plt.bar(x, data, alpha=0.8, color=colors, tick_label=labels)
plt.show()
def hist():
"""
直方图
"""
    # build a list containing three arrays
data = [np.random.randint(0, n, n) for n in [3000, 4000, 5000]]
labels = ['3K', '4K', '5K']
    # the bins array sets the bin edges of the histogram, i.e. [0, 100) is one bin, [100, 500) is another, and so on
bins = [0, 100, 500, 1000, 2000, 3000, 4000, 5000]
plt.hist(data, bins=bins, label=labels)
plt.legend()
plt.show()
if __name__ == '__main__':
# linaer()
# d2()
# histogram()
# d3()
# draw_normal()
# dynamic()
# pie()
# bar()
hist()
|
[
"matplotlib.pyplot.title",
"numpy.random.seed",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.figure",
"numpy.random.randint",
"numpy.arange",
"numpy.sin",
"numpy.random.normal",
"numpy.conjugate",
"numpy.fft.ifftshift",
"numpy.meshgrid",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"numpy.complex",
"matplotlib.pyplot.pause",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.pie",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.subplot",
"numpy.random.uniform",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.scatter",
"numpy.zeros",
"matplotlib.pyplot.axis",
"numpy.random.rand",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((263, 287), 'numpy.random.seed', 'np.random.seed', (['(19260817)'], {}), '(19260817)\n', (277, 287), True, 'import numpy as np\n'), ((320, 342), 'numpy.linspace', 'np.linspace', (['(0)', '(5)', '(100)'], {}), '(0, 5, 100)\n', (331, 342), True, 'import numpy as np\n'), ((489, 507), 'matplotlib.pyplot.figure', 'plt.figure', (['"""data"""'], {}), "('data')\n", (499, 507), True, 'import matplotlib.pyplot as plt\n'), ((540, 564), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_data', '"""."""'], {}), "(x, y_data, '.')\n", (548, 564), True, 'import matplotlib.pyplot as plt\n'), ((595, 614), 'matplotlib.pyplot.figure', 'plt.figure', (['"""model"""'], {}), "('model')\n", (605, 614), True, 'import matplotlib.pyplot as plt\n'), ((619, 633), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {}), '(x, y)\n', (627, 633), True, 'import matplotlib.pyplot as plt\n'), ((651, 677), 'matplotlib.pyplot.figure', 'plt.figure', (['"""data & model"""'], {}), "('data & model')\n", (661, 677), True, 'import matplotlib.pyplot as plt\n'), ((805, 830), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y', '"""k"""'], {'lw': '(2)'}), "(x, y, 'k', lw=2)\n", (813, 830), True, 'import matplotlib.pyplot as plt\n'), ((860, 882), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y_data'], {}), '(x, y_data)\n', (871, 882), True, 'import matplotlib.pyplot as plt\n'), ((900, 932), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./data/result.png"""'], {}), "('./data/result.png')\n", (911, 932), True, 'import matplotlib.pyplot as plt\n'), ((948, 958), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (956, 958), True, 'import matplotlib.pyplot as plt\n'), ((1392, 1427), 'matplotlib.pyplot.figure', 'plt.figure', (['"""Bar chart & Pie chart"""'], {}), "('Bar chart & Pie chart')\n", (1402, 1427), True, 'import matplotlib.pyplot as plt\n'), ((1576, 1588), 'numpy.arange', 'np.arange', (['(3)'], {}), '(3)\n', (1585, 1588), True, 'import numpy as np\n'), ((2605, 2615), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2613, 2615), True, 'import matplotlib.pyplot as plt\n'), ((2757, 2783), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'n_grids'], {}), '(0, 1, n_grids)\n', (2768, 2783), True, 'import numpy as np\n'), ((2792, 2818), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'n_grids'], {}), '(0, 1, n_grids)\n', (2803, 2818), True, 'import numpy as np\n'), ((2918, 2935), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (2929, 2935), True, 'import numpy as np\n'), ((2969, 3015), 'numpy.zeros', 'np.zeros', (['(n_grids, n_grids)'], {'dtype': 'np.complex'}), '((n_grids, n_grids), dtype=np.complex)\n', (2977, 3015), True, 'import numpy as np\n'), ((3451, 3482), 'matplotlib.pyplot.figure', 'plt.figure', (['"""3D surface & wire"""'], {}), "('3D surface & wire')\n", (3461, 3482), True, 'import matplotlib.pyplot as plt\n'), ((3843, 3853), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3851, 3853), True, 'import matplotlib.pyplot as plt\n'), ((3906, 3920), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3918, 3920), True, 'import matplotlib.pyplot as plt\n'), ((4130, 4152), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(50)'], {}), '(-1, 1, 50)\n', (4141, 4152), True, 'import numpy as np\n'), ((4191, 4203), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4201, 4203), True, 'import matplotlib.pyplot as plt\n'), ((4240, 4255), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y1'], {}), '(x, y1)\n', (4248, 4255), True, 'import matplotlib.pyplot as plt\n'), ((4266, 4325), 
'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y2'], {'color': '"""red"""', 'linewidth': '(1.0)', 'linestyle': '"""--"""'}), "(x, y2, color='red', linewidth=1.0, linestyle='--')\n", (4274, 4325), True, 'import matplotlib.pyplot as plt\n'), ((4329, 4349), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x axis"""'], {}), "('x axis')\n", (4339, 4349), True, 'import matplotlib.pyplot as plt\n'), ((4354, 4374), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y axis"""'], {}), "('y axis')\n", (4364, 4374), True, 'import matplotlib.pyplot as plt\n'), ((4394, 4459), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'handles': '[l1, l2]', 'labels': "['a', 'b']", 'loc': '"""upper left"""'}), "(handles=[l1, l2], labels=['a', 'b'], loc='upper left')\n", (4404, 4459), True, 'import matplotlib.pyplot as plt\n'), ((4467, 4477), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4475, 4477), True, 'import matplotlib.pyplot as plt\n'), ((5268, 5278), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5276, 5278), True, 'import matplotlib.pyplot as plt\n'), ((5479, 5499), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (5490, 5499), True, 'import matplotlib.pyplot as plt\n'), ((5504, 5551), 'matplotlib.pyplot.pie', 'plt.pie', (['data'], {'labels': 'labels', 'autopct': '"""%1.1f%%"""'}), "(data, labels=labels, autopct='%1.1f%%')\n", (5511, 5551), True, 'import matplotlib.pyplot as plt\n'), ((5556, 5573), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (5564, 5573), True, 'import matplotlib.pyplot as plt\n'), ((5578, 5590), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5588, 5590), True, 'import matplotlib.pyplot as plt\n'), ((5630, 5650), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (5641, 5650), True, 'import matplotlib.pyplot as plt\n'), ((5679, 5727), 'matplotlib.pyplot.pie', 'plt.pie', (['data1'], {'labels': 'labels', 'autopct': '"""%1.1f%%"""'}), "(data1, labels=labels, autopct='%1.1f%%')\n", (5686, 5727), True, 'import matplotlib.pyplot as plt\n'), ((5735, 5752), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (5743, 5752), True, 'import matplotlib.pyplot as plt\n'), ((5757, 5769), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5767, 5769), True, 'import matplotlib.pyplot as plt\n'), ((5775, 5785), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5783, 5785), True, 'import matplotlib.pyplot as plt\n'), ((5938, 5950), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (5947, 5950), True, 'import numpy as np\n'), ((5962, 6004), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(100)', 'size': 'N'}), '(low=0, high=100, size=N)\n', (5979, 6004), True, 'import numpy as np\n'), ((6123, 6148), 'matplotlib.pyplot.title', 'plt.title', (['"""Weekday Data"""'], {}), "('Weekday Data')\n", (6132, 6148), True, 'import matplotlib.pyplot as plt\n'), ((6153, 6213), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'data'], {'alpha': '(0.8)', 'color': 'colors', 'tick_label': 'labels'}), '(x, data, alpha=0.8, color=colors, tick_label=labels)\n', (6160, 6213), True, 'import matplotlib.pyplot as plt\n'), ((6218, 6228), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6226, 6228), True, 'import matplotlib.pyplot as plt\n'), ((6511, 6550), 'matplotlib.pyplot.hist', 'plt.hist', (['data'], {'bins': 'bins', 'label': 'labels'}), '(data, bins=bins, label=labels)\n', (6519, 6550), True, 'import matplotlib.pyplot as plt\n'), 
((6555, 6567), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6565, 6567), True, 'import matplotlib.pyplot as plt\n'), ((6573, 6583), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6581, 6583), True, 'import matplotlib.pyplot as plt\n'), ((425, 462), 'numpy.random.normal', 'np.random.normal', ([], {'scale': '(0.3)', 'size': '(100)'}), '(scale=0.3, size=100)\n', (441, 462), True, 'import numpy as np\n'), ((3088, 3104), 'numpy.complex', 'np.complex', (['x', 'y'], {}), '(x, y)\n', (3098, 3104), True, 'import numpy as np\n'), ((4073, 4087), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.2)'], {}), '(0.2)\n', (4082, 4087), True, 'import matplotlib.pyplot as plt\n'), ((5396, 5413), 'numpy.random.rand', 'np.random.rand', (['(7)'], {}), '(7)\n', (5410, 5413), True, 'import numpy as np\n'), ((5604, 5621), 'numpy.random.rand', 'np.random.rand', (['(7)'], {}), '(7)\n', (5618, 5621), True, 'import numpy as np\n'), ((6299, 6325), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n', 'n'], {}), '(0, n, n)\n', (6316, 6325), True, 'import numpy as np\n'), ((385, 394), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (391, 394), True, 'import numpy as np\n'), ((3117, 3153), 'numpy.random.uniform', 'np.random.uniform', (['(-1.0)', '(1.0)', '(t, 2)'], {}), '(-1.0, 1.0, (t, 2))\n', (3134, 3153), True, 'import numpy as np\n'), ((3226, 3251), 'numpy.conjugate', 'np.conjugate', (['noise[::-1]'], {}), '(noise[::-1])\n', (3238, 3251), True, 'import numpy as np\n'), ((3401, 3427), 'numpy.fft.ifftshift', 'np.fft.ifftshift', (['spectrum'], {}), '(spectrum)\n', (3417, 3427), True, 'import numpy as np\n'), ((5010, 5027), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5024, 5027), True, 'import numpy as np\n'), ((5035, 5052), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5049, 5052), True, 'import numpy as np\n'), ((5100, 5117), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5114, 5117), True, 'import numpy as np\n'), ((5125, 5142), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5139, 5142), True, 'import numpy as np\n'), ((5190, 5207), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5204, 5207), True, 'import numpy as np\n'), ((5215, 5232), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (5229, 5232), True, 'import numpy as np\n'), ((6018, 6039), 'numpy.random.rand', 'np.random.rand', (['(N * 3)'], {}), '(N * 3)\n', (6032, 6039), True, 'import numpy as np\n')]
|
import numpy as np
import warnings
warnings.filterwarnings("ignore")
from pathlib import Path
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from scipy.io import wavfile
import pyworld as pw
from scipy import signal
from util.utility import separate_speaker, get_separated_values
from LSTMLM.LSTMLM_generater import LSTMLM_generator
from DSAEPBHL_generator.DSAEPBHL_generator import DSAEPBHL_generator
from Basic_generator.AP_generator import AP_generator
from Basic_generator.F0_generator import F0_generator
from NPBDAA_generator.NPBDAA_generator import NPBDAA_generator
from NPBDAA_LM.Bigram_generator import Bigram_generator
from NPBDAA_LM.Unigram_generator import Unigram_generator
def denorm_mcep(generated_mcep, domain_min, domain_max):
mcep = (generated_mcep + 1) / 2
mcep *= (domain_max - domain_min)
mcep += domain_min
return mcep
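# Editor's note (illustrative, not from the original script): denorm_mcep maps a
# generated value from [-1, 1] back to [domain_min, domain_max]. For instance,
# with domain_min=-2.0 and domain_max=4.0, a generated 0.0 maps to 1.0 and a
# generated 1.0 maps to 4.0.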
parser = ArgumentParser(fromfile_prefix_chars='@', formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("--samplerate", type=int, default=48000)
parser.add_argument("--fftsize", type=int, default=1024)
parser.add_argument("--frame_period", type=float, default=5E-3)
parser.add_argument("--sentences_file", type=Path, required=True)
parser.add_argument("--letter_num", type=int, required=True)
parser.add_argument("--letter_stateseq", type=Path, required=True)
parser.add_argument("--ap", type=Path, required=True)
parser.add_argument("--f0", type=Path, required=True)
parser.add_argument("--flat_f0", action="store_true")
parser.add_argument("--flat_ap", action="store_true")
parser.add_argument("--parameter", type=Path)
parser.add_argument("--speaker_id", type=Path, required=True)
parser.add_argument("--mcep_norm_param", type=Path, nargs=2, required=True)
parser.add_argument("--target_speaker", type=str)
parser.add_argument("--dsae_param", type=Path, required=True)
parser.add_argument("--pb_param", type=Path, required=True)
parser.add_argument("--output_prefix", type=Path)
parser.add_argument("--sentences", action="append", type=int, nargs="+")
parser.add_argument("--size", type=int, default=1)
parser.add_argument("--mode", choices=["ML", "RND"], default="ML")
parser.add_argument("--LM", choices=["LSTM", "Bigram", "Unigram"])
parser.add_argument("--unique", action="store_true")
parser.add_argument("--LSTM_model", type=Path)
args = parser.parse_args()
speakers, spkind_keys = separate_speaker(np.load(args.speaker_id))
speaker_num = len(speakers)
target_idx = speakers.index(args.target_speaker)
src_letter_stateseq = get_separated_values(np.load(args.letter_stateseq), spkind_keys)[target_idx]
src_f0 = get_separated_values(np.load(args.f0), spkind_keys)[target_idx]
src_ap = get_separated_values(np.load(args.ap), spkind_keys)[target_idx]
mcep_min = np.load(args.mcep_norm_param[0])
mcep_max = np.load(args.mcep_norm_param[1])
if args.sentences is None:
if args.LM == "Unigram":
snt_generator = Unigram_generator(args.sentences_file)
elif args.LM == "Bigram":
snt_generator = Bigram_generator(args.sentences_file, args.parameter)
elif args.LM == "LSTM":
snt_generator = LSTMLM_generator(args.LSTM_model, args.sentences_file)
ap_generator = AP_generator(args.letter_num, src_ap, letter_stateseq=src_letter_stateseq, flat=args.flat_ap, mode=args.mode)
f0_generator = F0_generator(args.letter_num, src_f0, letter_stateseq=src_letter_stateseq, flat=args.flat_f0, mode=args.mode)
feat_generator = NPBDAA_generator(args.parameter, mode=args.mode)
mcep_generator = DSAEPBHL_generator(args.dsae_param, args.pb_param)
if args.sentences is None:
sentences = snt_generator.generate(size=args.size, unique=args.unique)
else:
sentences = args.sentences
print(f"sentences: {sentences}")
for s, snt in enumerate(sentences):
feature, gen_letter_stateseq = feat_generator.generate(snt)
mcep = mcep_generator.generate(feature, args.target_speaker)
ap = ap_generator.generate(gen_letter_stateseq)
f0 = f0_generator.generate(gen_letter_stateseq)
f0[f0<0] = 0
mcep = denorm_mcep(mcep, mcep_min, mcep_max)
mcep = signal.medfilt(mcep, (5, 1))
mcep = mcep.astype(float, order="C")
decoded_sp = pw.decode_spectral_envelope(mcep, args.samplerate, args.fftsize)
synthesized = pw.synthesize(f0, decoded_sp, ap, args.samplerate, frame_period=args.frame_period*1000)
synthesized = synthesized / max(abs(synthesized)) * 30000
args.output_prefix.parent.mkdir(parents=True, exist_ok=True)
out_file = args.output_prefix.with_name(f"{args.output_prefix.name}_{s:02d}_({'_'.join(map(str, snt))}).wav")
wavfile.write(out_file, args.samplerate, synthesized.astype(np.int16))
|
[
"numpy.load",
"pyworld.synthesize",
"NPBDAA_LM.Bigram_generator.Bigram_generator",
"argparse.ArgumentParser",
"warnings.filterwarnings",
"scipy.signal.medfilt",
"LSTMLM.LSTMLM_generater.LSTMLM_generator",
"pyworld.decode_spectral_envelope",
"Basic_generator.F0_generator.F0_generator",
"Basic_generator.AP_generator.AP_generator",
"NPBDAA_LM.Unigram_generator.Unigram_generator",
"DSAEPBHL_generator.DSAEPBHL_generator.DSAEPBHL_generator",
"NPBDAA_generator.NPBDAA_generator.NPBDAA_generator"
] |
[((35, 68), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (58, 68), False, 'import warnings\n'), ((888, 981), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'fromfile_prefix_chars': '"""@"""', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), "(fromfile_prefix_chars='@', formatter_class=\n ArgumentDefaultsHelpFormatter)\n", (902, 981), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter\n'), ((2776, 2808), 'numpy.load', 'np.load', (['args.mcep_norm_param[0]'], {}), '(args.mcep_norm_param[0])\n', (2783, 2808), True, 'import numpy as np\n'), ((2820, 2852), 'numpy.load', 'np.load', (['args.mcep_norm_param[1]'], {}), '(args.mcep_norm_param[1])\n', (2827, 2852), True, 'import numpy as np\n'), ((3204, 3317), 'Basic_generator.AP_generator.AP_generator', 'AP_generator', (['args.letter_num', 'src_ap'], {'letter_stateseq': 'src_letter_stateseq', 'flat': 'args.flat_ap', 'mode': 'args.mode'}), '(args.letter_num, src_ap, letter_stateseq=src_letter_stateseq,\n flat=args.flat_ap, mode=args.mode)\n', (3216, 3317), False, 'from Basic_generator.AP_generator import AP_generator\n'), ((3329, 3442), 'Basic_generator.F0_generator.F0_generator', 'F0_generator', (['args.letter_num', 'src_f0'], {'letter_stateseq': 'src_letter_stateseq', 'flat': 'args.flat_f0', 'mode': 'args.mode'}), '(args.letter_num, src_f0, letter_stateseq=src_letter_stateseq,\n flat=args.flat_f0, mode=args.mode)\n', (3341, 3442), False, 'from Basic_generator.F0_generator import F0_generator\n'), ((3456, 3504), 'NPBDAA_generator.NPBDAA_generator.NPBDAA_generator', 'NPBDAA_generator', (['args.parameter'], {'mode': 'args.mode'}), '(args.parameter, mode=args.mode)\n', (3472, 3504), False, 'from NPBDAA_generator.NPBDAA_generator import NPBDAA_generator\n'), ((3523, 3573), 'DSAEPBHL_generator.DSAEPBHL_generator.DSAEPBHL_generator', 'DSAEPBHL_generator', (['args.dsae_param', 'args.pb_param'], {}), '(args.dsae_param, args.pb_param)\n', (3541, 3573), False, 'from DSAEPBHL_generator.DSAEPBHL_generator import DSAEPBHL_generator\n'), ((2416, 2440), 'numpy.load', 'np.load', (['args.speaker_id'], {}), '(args.speaker_id)\n', (2423, 2440), True, 'import numpy as np\n'), ((4096, 4124), 'scipy.signal.medfilt', 'signal.medfilt', (['mcep', '(5, 1)'], {}), '(mcep, (5, 1))\n', (4110, 4124), False, 'from scipy import signal\n'), ((4184, 4248), 'pyworld.decode_spectral_envelope', 'pw.decode_spectral_envelope', (['mcep', 'args.samplerate', 'args.fftsize'], {}), '(mcep, args.samplerate, args.fftsize)\n', (4211, 4248), True, 'import pyworld as pw\n'), ((4267, 4361), 'pyworld.synthesize', 'pw.synthesize', (['f0', 'decoded_sp', 'ap', 'args.samplerate'], {'frame_period': '(args.frame_period * 1000)'}), '(f0, decoded_sp, ap, args.samplerate, frame_period=args.\n frame_period * 1000)\n', (4280, 4361), True, 'import pyworld as pw\n'), ((2563, 2592), 'numpy.load', 'np.load', (['args.letter_stateseq'], {}), '(args.letter_stateseq)\n', (2570, 2592), True, 'import numpy as np\n'), ((2649, 2665), 'numpy.load', 'np.load', (['args.f0'], {}), '(args.f0)\n', (2656, 2665), True, 'import numpy as np\n'), ((2722, 2738), 'numpy.load', 'np.load', (['args.ap'], {}), '(args.ap)\n', (2729, 2738), True, 'import numpy as np\n'), ((2934, 2972), 'NPBDAA_LM.Unigram_generator.Unigram_generator', 'Unigram_generator', (['args.sentences_file'], {}), '(args.sentences_file)\n', (2951, 2972), False, 'from NPBDAA_LM.Unigram_generator import Unigram_generator\n'), ((3027, 3080), 'NPBDAA_LM.Bigram_generator.Bigram_generator', 
'Bigram_generator', (['args.sentences_file', 'args.parameter'], {}), '(args.sentences_file, args.parameter)\n', (3043, 3080), False, 'from NPBDAA_LM.Bigram_generator import Bigram_generator\n'), ((3133, 3187), 'LSTMLM.LSTMLM_generater.LSTMLM_generator', 'LSTMLM_generator', (['args.LSTM_model', 'args.sentences_file'], {}), '(args.LSTM_model, args.sentences_file)\n', (3149, 3187), False, 'from LSTMLM.LSTMLM_generater import LSTMLM_generator\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
import pytest
import pickle
import os
from .. import Quat, normalize
def indices(t):
import itertools
for k in itertools.product(*[range(i) for i in t]):
yield k
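# Editor's note (illustrative): indices((2, 3)) yields the index tuples
# (0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2).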
def normalize_angles(x, xmin, xmax):
while np.any(x >= xmax):
        x -= np.where(x >= xmax, 360, 0)
while np.any(x < xmin):
x += np.where(x < xmin, 360, 0)
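# Editor's note (illustrative, not part of the original tests):
# normalize_angles(np.array([-10., 370.]), 0, 360) wraps the values in place
# into [0, 360), giving array([350., 10.]).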
ra = 10.
dec = 20.
roll = 30.
q0 = Quat([ra, dec, roll])
equatorial_23 = np.array([[[10, 20, 30],
[10, 20, -30],
[10, -60, 30]],
[[10, 20, 0],
[10, 50, 30],
[10, -50, -30]]], dtype=float)
q_23 = np.zeros(equatorial_23[..., 0].shape + (4,))
for _i, _j in indices(equatorial_23.shape[:-1]):
q_23[_i, _j] = Quat(equatorial_23[_i, _j]).q
transform_23 = np.zeros(equatorial_23[..., 0].shape + (3, 3))
for _i, _j in indices(transform_23.shape[:-2]):
transform_23[_i, _j] = Quat(equatorial_23[_i, _j]).transform
def test_shape():
q = Quat(q=np.zeros(4,))
assert q.shape == ()
with pytest.raises(AttributeError):
q.shape = (4,)
def test_init_exceptions():
with pytest.raises(TypeError):
_ = Quat(q=np.zeros((3, ))) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(equatorial=np.zeros((4, ))) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(transform=np.zeros((4, ))) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(np.zeros((2, ))) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(np.zeros((5, ))) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(equatorial_23) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(q_23) # old-style API, wrong shape
with pytest.raises(TypeError):
_ = Quat(transform_23) # old-style API, wrong shape
with pytest.raises(ValueError):
_ = Quat(q=np.zeros(4), transform=np.zeros((3, 3))) # too many arguments
with pytest.raises(ValueError):
_ = Quat(q=np.zeros(4), equatorial=np.zeros(3)) # too many arguments
with pytest.raises(ValueError):
_ = Quat(equatorial=np.zeros(3), transform=np.zeros((3, 3))) # too many arguments
with pytest.raises(ValueError):
# too many arguments
_ = Quat(q=np.zeros(4), transform=np.zeros((3, 3)), equatorial=np.zeros(3))
with pytest.raises(ValueError):
_ = Quat(q=[[[1., 0., 0., 1.]]]) # q not normalized
with pytest.raises(ValueError):
_ = Quat([0, 1, 's']) # could not convert string to float
def test_from_q():
q = [0.26853582, -0.14487813, 0.12767944, 0.94371436]
q1 = Quat(q)
q2 = Quat(q=q)
q3 = Quat(q1)
q = np.array(q)
assert np.all(q1.q == q)
assert np.all(q2.q == q)
assert np.all(q3.q == q)
def test_from_eq():
q = Quat([ra, dec, roll])
assert np.allclose(q.q[0], 0.26853582)
assert np.allclose(q.q[1], -0.14487813)
assert np.allclose(q.q[2], 0.12767944)
assert np.allclose(q.q[3], 0.94371436)
assert np.allclose(q.roll0, 30)
assert np.allclose(q.ra0, 10)
assert q.pitch == -q.dec
assert q.yaw == q.ra0
q1 = Quat(equatorial=[ra, dec, roll])
assert np.all(q1.q == q.q)
def test_from_eq_vectorized():
# the following line would give unexpected results
# because the input is interpreted as a (non-vectorized) transform
# the shape of the input is (3,3)
# q = Quat(equatorial_23[0])
# this is the proper way:
q = Quat(equatorial=equatorial_23[0])
assert q.q.shape == (3, 4)
for i in indices(q.shape):
# check that Quat(equatorial).q[i] == Quat(equatorial[i]).q
assert np.all(q.q[i] == Quat(equatorial_23[0][i]).q)
q = Quat(equatorial=equatorial_23)
assert q.q.shape == (2, 3, 4)
for i in indices(q.shape):
# check that Quat(equatorial).q[i] == Quat(equatorial[i]).q
assert np.all(q.q[i] == Quat(equatorial_23[i]).q)
# test init from list
q = Quat(equatorial=[ra, dec, roll])
assert np.all(q.q == q0.q)
q = Quat(equatorial=equatorial_23)
assert np.all(q.q == q_23)
assert np.all(q.equatorial == equatorial_23)
assert np.all(q.transform == transform_23)
def test_from_eq_shapes():
q = Quat(equatorial=[10., 20., 30.])
assert np.array(q.ra0).shape == ()
assert np.array(q.roll0).shape == ()
assert np.array(q.ra).shape == ()
assert np.array(q.dec).shape == ()
assert np.array(q.roll).shape == ()
assert np.array(q.yaw).shape == ()
assert np.array(q.pitch).shape == ()
assert q.q.shape == (4, )
assert q.equatorial.shape == (3, )
assert q.transform.shape == (3, 3)
q = Quat(equatorial=equatorial_23[:1, :1])
assert q.ra0.shape == (1, 1)
assert q.roll0.shape == (1, 1)
assert q.ra.shape == (1, 1)
assert q.dec.shape == (1, 1)
assert q.roll.shape == (1, 1)
assert q.yaw.shape == (1, 1)
assert q.pitch.shape == (1, 1)
assert q.q.shape == (1, 1, 4)
assert q.equatorial.shape == (1, 1, 3)
assert q.transform.shape == (1, 1, 3, 3)
def test_transform_from_eq():
q = Quat(equatorial=equatorial_23)
assert q.transform.shape == (2, 3, 3, 3)
for i in indices(q.shape):
# check that
# Quat(equatorial).transform[i] == Quat(equatorial[i]).transform
assert np.all(q.transform[i] == Quat(equatorial_23[i]).transform)
def test_from_transform():
"""Initialize from inverse of q0 via transform matrix"""
q = Quat(q0.transform.transpose())
assert np.allclose(q.q[0], -0.26853582)
assert np.allclose(q.q[1], 0.14487813)
assert np.allclose(q.q[2], -0.12767944)
assert np.allclose(q.q[3], 0.94371436)
q = Quat(q0.transform)
assert np.allclose(q.roll0, 30)
assert np.allclose(q.ra0, 10)
q1 = Quat(transform=q0.transform)
assert np.all(q1.q == q.q)
def test_from_transform_vectorized():
q = Quat(transform=transform_23)
assert q.q.shape == (2, 3, 4)
for i in indices(q.shape):
# check that Quat(transform).q[i] == Quat(transform[i]).q
assert np.all(q.q[i] == Quat(transform=transform_23[i]).q)
q = Quat(transform=transform_23[:1, :1])
assert q.q.shape == (1, 1, 4)
t = [[[[9.25416578e-01, -3.18795778e-01, -2.04874129e-01],
[1.63175911e-01, 8.23172945e-01, -5.43838142e-01],
[3.42020143e-01, 4.69846310e-01, 8.13797681e-01]]]]
q = Quat(transform=t)
assert q.q.shape == (1, 1, 4)
q = Quat(transform=transform_23)
assert np.allclose(q.q, q_23)
# to compare roll, it has to be normalized to within a fixed angular range (0, 360).
eq = np.array(q.equatorial)
normalize_angles(eq[..., -1], 0, 360)
eq_23 = np.array(equatorial_23)
normalize_angles(eq_23[..., -1], 0, 360)
assert np.allclose(eq, eq_23)
assert np.allclose(q.transform, transform_23)
def test_eq_from_transform():
# this raises 'Unexpected negative norm' exception due to roundoff in copy/paste above
# q = Quat(transform=transform_23)
# assert q.equatorial.shape == (2, 3, 3)
# assert np.allclose(q.equatorial, equatorial_23)
t = np.zeros((4, 5, 3, 3))
t[:] = q0.transform[np.newaxis][np.newaxis]
q = Quat(transform=t)
assert np.allclose(q.roll0, 30)
assert np.allclose(q.ra0, 10)
assert q.equatorial.shape == (4, 5, 3)
def test_from_q_vectorized():
q = Quat(q=q_23)
assert q.shape == (2, 3)
# this also tests that quaternions with negative scalar component are flipped
flip = np.sign(q_23[..., -1]).reshape((2, 3, 1))
assert np.allclose(q.q, q_23 * flip)
# to compare roll, it has to be normalized to within a fixed angular range (0, 360).
eq = np.array(q.equatorial)
normalize_angles(eq[..., -1], 0, 360)
eq_23 = np.array(equatorial_23)
normalize_angles(eq_23[..., -1], 0, 360)
assert np.allclose(eq, eq_23, rtol=0)
assert np.allclose(q.transform, transform_23, rtol=0)
q = Quat(q=q_23[0])
assert q.shape == (3,)
q = Quat(q=q_23[:1, :1])
assert q.shape == (1, 1)
def test_inv_eq():
q = Quat(q0.equatorial)
t = q.transform
tinv = q.inv().transform
t_tinv = np.dot(t, tinv)
for v1, v2 in zip(t_tinv.flatten(), [1, 0, 0, 0, 1, 0, 0, 0, 1]):
assert np.allclose(v1, v2)
def test_inv_q():
q = Quat(q0.q)
assert q.q.shape == q.inv().q.shape
t = q.transform
tinv = q.inv().transform
t_tinv = np.dot(t, tinv)
for v1, v2 in zip(t_tinv.flatten(), [1, 0, 0, 0, 1, 0, 0, 0, 1]):
assert np.allclose(v1, v2)
def test_inv_vectorized():
q1 = Quat(q=q_23[:1, :1])
assert q1.q.shape == (1, 1, 4)
q1_inv = q1.inv()
assert q1_inv.q.shape == q1.q.shape
for i in indices(q1.shape):
# check that Quat(q).inv().q[i] == Quat(q[i]).inv().q
assert np.all(q1_inv.q[i] == Quat(q=q1.q[i]).inv().q)
def test_dq():
q1 = Quat((20, 30, 0))
q2 = Quat((20, 30.1, 1))
dq = q1.dq(q2)
assert np.allclose(dq.equatorial, (0, 0.1, 1))
# same from array instead of Quat
dq = q1.dq(q2.q)
assert np.allclose(dq.equatorial, (0, 0.1, 1))
def test_dq_vectorized():
q1 = Quat(q=q_23[:1, :2])
q2 = Quat(q=q_23[1:, 1:])
assert q1.q.shape == q2.q.shape
dq = q1.dq(q2)
assert dq.q.shape == q1.q.shape # shape (1,2,4)
# same but with array argument instead of Quat
dq2 = q1.dq(q=q2.q)
assert dq2.q.shape == dq.q.shape
assert np.all(dq2.q == dq.q)
for i in indices(q1.shape):
# check that Quat(q1).dq(q2).q[i] == Quat(q1[i]).dq(q2[i]).q
assert np.all(dq.q[i] == Quat(q=q1.q[i]).dq(Quat(q=q2.q[i])).q)
# note that both quaternions have same _internal_ shape, should this fail?
q1 = Quat((20, 30, 0))
q2 = Quat(equatorial=[[20, 30.1, 1]])
assert np.allclose(q1.dq(q2).equatorial, [[0, 0.1, 1]])
assert np.allclose(q1.dq(q=q2.q).equatorial, [[0, 0.1, 1]])
assert np.allclose(q1.dq(equatorial=q2.equatorial).equatorial, [[0, 0.1, 1]])
assert np.allclose(q1.dq(transform=q2.transform).equatorial, [[0, 0.1, 1]])
# and the interface is the same as the constructor:
with pytest.raises(TypeError):
q1.dq(q2.q)
with pytest.raises(TypeError):
q1.dq(q2.equatorial)
with pytest.raises(TypeError):
q1.dq(q2.transform)
def test_vector_to_scalar_correspondence():
"""
Simple test that all possible transform pathways give the same
answer when done in vectorized form as they do for the scalar version.
"""
atol = 1e-12
# Input equatorial has roll not in 0:360, so fix that for comparisons.
eq_23 = equatorial_23.copy()
normalize_angles(eq_23[..., -1], 0, 360)
# Compare vectorized computations for all possible input/output combos
# with the same for the scalar calculation.
q = Quat(equatorial=equatorial_23)
assert np.all(q.q == q_23)
assert np.all(q.equatorial == equatorial_23)
assert np.all(q.transform == transform_23)
q = Quat(q=q_23)
assert np.all(q.q == q_23)
assert np.allclose(q.equatorial, eq_23, rtol=0, atol=atol)
assert np.allclose(q.transform, transform_23, rtol=0, atol=atol)
q = Quat(transform=transform_23)
assert np.allclose(q.q, q_23, rtol=0, atol=atol)
assert np.allclose(q.equatorial, eq_23, rtol=0, atol=atol)
assert np.all(q.transform == transform_23)
def test_ra0_roll0():
q = Quat(Quat([-1, 0, -2]).q)
assert np.allclose(q.ra, 359)
assert np.allclose(q.ra0, -1)
assert np.allclose(q.roll, 358)
assert np.allclose(q.roll0, -2)
def test_repr():
q = Quat([1, 2, 3])
assert repr(q) == '<Quat q1=0.02632421 q2=-0.01721736 q3=0.00917905 q4=0.99946303>'
class SubQuat(Quat):
pass
q = SubQuat([1, 2, 3])
assert repr(q) == '<SubQuat q1=0.02632421 q2=-0.01721736 q3=0.00917905 q4=0.99946303>'
q = Quat(equatorial=[[1, 2, 3]])
assert repr(q) == 'Quat(array([[ 0.02632421, -0.01721736, 0.00917905, 0.99946303]]))'
def test_numeric_underflow():
"""
Test new code (below) for numeric issue https://github.com/sot/Quaternion/issues/1.
If this code is not included then the test fails with a MathDomainError::
one_minus_xn2 = 1 - xn**2
if one_minus_xn2 < 0:
if one_minus_xn2 < -1e-12:
raise ValueError('Unexpected negative norm: {}'.format(one_minus_xn2))
one_minus_xn2 = 0
"""
quat = Quat((0, 0, 0))
angle = 0
while angle < 360:
q = Quat((0, angle, 0))
quat = q * quat
_ = quat.equatorial
angle += 0.1
def test_div_mult():
q1 = Quat((1, 2, 3))
q2 = Quat((10, 20, 30))
q12d = q1 / q2
assert q1.shape == q12d.shape
assert q1.shape == q1.inv().shape
q12m = q1 * q2.inv()
assert q1.shape == q12m.shape
assert np.all(q12d.q == q12m.q)
q3 = Quat(equatorial=[[10, 20, 30]])
assert (q1 * q3).shape != q1.shape
assert (q1 * q3).shape == q3.shape
def test_mult_vectorized():
    q1 = Quat(q=q_23[:1, :2])  # shape (1, 2)
    q2 = Quat(q=q_23[1:, 1:])  # shape (1, 2)
assert q1.q.shape == q2.q.shape
q12 = q1 * q2
assert q12.q.shape == q1.q.shape
def test_normalize():
a = [[[1., 0., 0., 1.]]]
b = normalize(a)
assert np.isclose(np.sum(b**2), 1)
def test_copy():
# data members must be copies so they are not modified by accident
q = np.array(q_23[0, 0])
q1 = Quat(q=q)
q[-1] = 0
assert q1.q[-1] != 0
# this one passes
t = np.array(transform_23)
q1 = Quat(transform=t)
t[-1] = 0
assert not np.all(q1.transform == t)
# this one passes
eq = np.array([10, 90, 30])
q1 = Quat(equatorial=eq)
eq[-1] = 0
assert not np.all(q1.equatorial == eq)
def test_format():
# this is to test standard usage downstream
q = Quat(q_23[0, 0])
print(f'ra={q.ra:.5f}, dec={q.dec:.5f}, roll={q.roll:.5f}')
def test_scalar_attribute_types():
q = Quat(equatorial=[10, 20, 30])
attrs = ['ra', 'dec', 'roll', 'ra0', 'roll0', 'pitch', 'yaw', 'transform', 'q']
types = [np.float64] * 7 + [np.ndarray] * 2
# All returned as scalars
for attr, typ in zip(attrs, types):
assert type(getattr(q, attr)) is typ
q2 = Quat(transform=q.transform.astype(np.float32))
for attr, typ in zip(attrs, types):
assert type(getattr(q2, attr)) is typ
q2 = Quat(q=q.q.astype(np.float32))
for attr, typ in zip(attrs, types):
assert type(getattr(q, attr)) is typ
def test_mult_and_dq_broadcasted():
"""Test mult and delta quat of Quats with different but broadcastable shapes.
"""
q2 = Quat(equatorial=np.arange(18).reshape(3, 2, 3))
q1 = Quat(equatorial=[[10, 20, 30], [40, 50, 60]])
q0 = Quat(equatorial=[10, 20, 30])
# (3,2) * () = (3,2)
q20 = q2 * q0
dq20 = q2.dq(q0)
assert q20.shape == (3, 2)
assert dq20.shape == (3, 2)
for ii in range(3):
for jj in range(2):
qq = q2[ii, jj] * q0
dq = q2[ii, jj].dq(q0)
assert np.allclose(qq.q, q20.q[ii, jj])
assert np.allclose(dq.q, dq20.q[ii, jj])
# (3,2) * (2,) = (3,2)
q21 = q2 * q1
dq21 = q2.dq(q1)
assert q21.shape == (3, 2)
assert dq21.shape == (3, 2)
for ii in range(3):
for jj in range(2):
qq = q2[ii, jj] * q1[jj]
dq = q2[ii, jj].dq(q1[jj])
assert np.allclose(qq.q, q21.q[ii, jj])
assert np.allclose(dq.q, dq21.q[ii, jj])
def test_array_attribute_types():
q = Quat(equatorial=[[10, 20, 30]]) # 1-d
attrs = ['ra', 'dec', 'roll', 'ra0', 'roll0', 'pitch', 'yaw', 'transform', 'q']
shapes = [(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1, 3, 3), (1, 4)]
# All returned as shape (1,) array
for attr, shape in zip(attrs, shapes):
assert type(getattr(q, attr)) is np.ndarray
assert getattr(q, attr).shape == shape
q2 = Quat(transform=q.transform.astype(np.float32))
for attr, shape in zip(attrs, shapes):
assert type(getattr(q2, attr)) is np.ndarray
assert getattr(q, attr).shape == shape
q2 = Quat(q=q.q.astype(np.float32))
for attr, shape in zip(attrs, shapes):
assert type(getattr(q, attr)) is np.ndarray
assert getattr(q, attr).shape == shape
def test_pickle():
"""
Pickle file generated using Quaternion v3.4.1:
from Quaternion import Quat
import pickle
q = Quat([10., 20., 30.])
quats = [Quat(q.q), Quat(q.transform), Quat(q.equatorial)]
quats.append(q)
with open('quaternion-v3.4.1.pkl', 'wb') as f:
pickle.dump(quats, f)
"""
# testing we can unpickle older versions
filename = os.path.join(os.path.dirname(__file__), 'data', 'quaternion-v3.4.1.pkl')
with open(filename, 'rb') as f:
quaternions = pickle.load(f)
for q in quaternions:
assert np.all(np.isclose(q.q, [0.26853582, -0.14487813, 0.12767944, 0.94371436]))
assert np.all(np.isclose(q.equatorial, [10., 20., 30.]))
assert np.all(np.isclose(q.transform, [[0.92541658, -0.31879578, -0.20487413],
[0.16317591, 0.82317294, -0.54383814],
[0.34202014, 0.46984631, 0.81379768]]))
def test_init_quat_from_attitude():
# Basic tests for Quat.from_attitude
q = Quat.from_attitude([Quat([0, 1, 2]),
Quat([3, 4, 5])])
# 1-d list of Quat
assert np.allclose(q.equatorial, [[0, 1, 2],
[3, 4, 5]])
# From existing Quat
q2 = Quat.from_attitude(q)
assert np.all(q.q == q2.q)
assert q is not q2
# Normal Quat initializer: 3-element list implies equatorial
q = Quat.from_attitude([10, 20, 30])
assert np.allclose(q.equatorial, [10, 20, 30])
# 2-d list of Quat
q = Quat.from_attitude([[Quat([0, 1, 2]), Quat([3, 4, 5])]])
assert np.allclose(q.equatorial, [[[0, 1, 2],
[3, 4, 5]]])
# 1-d list of equatorial floats
q = Quat.from_attitude([[0, 1, 2], [3, 4, 5]])
assert np.allclose(q.equatorial, [[[0, 1, 2],
[3, 4, 5]]])
    # Heterogeneous list of floats
q = Quat.from_attitude([[0, 1, 2], [0, 1, 0, 0]])
assert np.allclose(q.equatorial, [[0, 1, 2],
[180, 0, 180]])
# Bad 1-d list of equatorial floats
with pytest.raises(ValueError, match="Float input must be a Nx3 or Nx4 array"):
q = Quat.from_attitude([[0, 1, 2, 4, 5], [3, 4, 5, 6, 7]])
# 1-d list of 4-vectors
q_list = [[0, 0, 1, 0], [0, 1, 0, 0]]
q = Quat.from_attitude(q_list)
assert np.allclose(q.q, q_list)
# Bad input
with pytest.raises(ValueError, match="Unable to initialize Quat from 'blah'"):
Quat.from_attitude('blah')
def test_rotate_x_to_vec_regress():
"""Note that truth values are just results from original code in Ska.quatutil.
They have not been independently validated"""
vec = [1, 2, 3]
q = Quat.rotate_x_to_vec(vec) # method='radec', default
assert np.allclose(q.q, [0.2358142, -0.38155539, 0.4698775, 0.76027777])
q = Quat.rotate_x_to_vec(vec, method='shortest')
assert np.allclose(q.q, [0., -0.50362718, 0.33575146, 0.79600918])
q = Quat.rotate_x_to_vec(vec, method='keep_z')
assert np.allclose(q.q, [-0.16269544, -0.56161937, 0.22572786, 0.77920525])
@pytest.mark.parametrize('method', ('keep_z', 'shortest', 'radec'))
def test_rotate_x_to_vec_functional(method):
vecs = np.random.random((100, 3)) - 0.5
for vec in vecs:
vec = vec / np.sqrt(np.sum(vec ** 2))
q = Quat.rotate_x_to_vec(vec, method)
vec1 = np.dot(q.transform, [1.0, 0, 0])
assert np.allclose(vec, vec1)
if method == 'radec':
assert np.isclose(q.roll, 0.0)
elif method == 'keep_z':
vec1 = np.dot(q.transform, [0, 0, 1.0])
assert np.isclose(vec1[1], 0.0)
def test_rotate_x_to_vec_bad_method():
with pytest.raises(ValueError, match='method must be one of'):
Quat.rotate_x_to_vec([1, 2, 3], 'not-a-method')
def test_rotate_about_vec():
q = Quat([10, 20, 30])
q2 = q.rotate_about_vec([0, 0, 10], 25)
assert np.allclose(q2.equatorial, [10 + 25, 20, 30])
q2 = q.rotate_about_vec([-10, 0, 0], 180)
assert np.allclose(q2.equatorial, [350., -20., 210.])
def test_rotate_about_vec_exceptions():
q1 = Quat([10, 20, 30])
q2 = Quat(equatorial=[[10, 20, 30], [1, 2, 3]])
with pytest.raises(ValueError, match='vec must be a single 3-vector'):
q1.rotate_about_vec([[1, 2, 3], [4, 5, 6]], 25)
with pytest.raises(ValueError, match='alpha must be a scalar'):
q1.rotate_about_vec([1, 2, 3], [25, 50])
with pytest.raises(ValueError, match='quaternion must be a scalar'):
q2.rotate_about_vec([1, 2, 3], 25)
@pytest.mark.parametrize('attr', ['q', 'equatorial', 'transform'])
def test_setting_different_shape(attr):
q0 = Quat([1, 2, 3])
q1 = Quat(equatorial=[[3, 1, 2],
[4, 5, 6]])
assert q1.shape == (2,)
val = getattr(q1, attr)
setattr(q0, attr, val)
assert q0.shape == q1.shape
assert np.all(getattr(q0, attr) == getattr(q1, attr))
|
[
"numpy.sum",
"numpy.allclose",
"os.path.dirname",
"numpy.zeros",
"numpy.any",
"pytest.raises",
"numpy.where",
"numpy.array",
"pickle.load",
"numpy.random.random",
"numpy.sign",
"numpy.dot",
"pytest.mark.parametrize",
"numpy.isclose",
"numpy.arange",
"numpy.all"
] |
[((517, 636), 'numpy.array', 'np.array', (['[[[10, 20, 30], [10, 20, -30], [10, -60, 30]], [[10, 20, 0], [10, 50, 30],\n [10, -50, -30]]]'], {'dtype': 'float'}), '([[[10, 20, 30], [10, 20, -30], [10, -60, 30]], [[10, 20, 0], [10, \n 50, 30], [10, -50, -30]]], dtype=float)\n', (525, 636), True, 'import numpy as np\n'), ((774, 818), 'numpy.zeros', 'np.zeros', (['(equatorial_23[..., 0].shape + (4,))'], {}), '(equatorial_23[..., 0].shape + (4,))\n', (782, 818), True, 'import numpy as np\n'), ((933, 979), 'numpy.zeros', 'np.zeros', (['(equatorial_23[..., 0].shape + (3, 3))'], {}), '(equatorial_23[..., 0].shape + (3, 3))\n', (941, 979), True, 'import numpy as np\n'), ((19703, 19769), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""method"""', "('keep_z', 'shortest', 'radec')"], {}), "('method', ('keep_z', 'shortest', 'radec'))\n", (19726, 19769), False, 'import pytest\n'), ((21180, 21245), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""attr"""', "['q', 'equatorial', 'transform']"], {}), "('attr', ['q', 'equatorial', 'transform'])\n", (21203, 21245), False, 'import pytest\n'), ((313, 330), 'numpy.any', 'np.any', (['(x >= xmax)'], {}), '(x >= xmax)\n', (319, 330), True, 'import numpy as np\n'), ((382, 398), 'numpy.any', 'np.any', (['(x < xmin)'], {}), '(x < xmin)\n', (388, 398), True, 'import numpy as np\n'), ((2908, 2919), 'numpy.array', 'np.array', (['q'], {}), '(q)\n', (2916, 2919), True, 'import numpy as np\n'), ((2931, 2948), 'numpy.all', 'np.all', (['(q1.q == q)'], {}), '(q1.q == q)\n', (2937, 2948), True, 'import numpy as np\n'), ((2960, 2977), 'numpy.all', 'np.all', (['(q2.q == q)'], {}), '(q2.q == q)\n', (2966, 2977), True, 'import numpy as np\n'), ((2989, 3006), 'numpy.all', 'np.all', (['(q3.q == q)'], {}), '(q3.q == q)\n', (2995, 3006), True, 'import numpy as np\n'), ((3070, 3101), 'numpy.allclose', 'np.allclose', (['q.q[0]', '(0.26853582)'], {}), '(q.q[0], 0.26853582)\n', (3081, 3101), True, 'import numpy as np\n'), ((3113, 3145), 'numpy.allclose', 'np.allclose', (['q.q[1]', '(-0.14487813)'], {}), '(q.q[1], -0.14487813)\n', (3124, 3145), True, 'import numpy as np\n'), ((3157, 3188), 'numpy.allclose', 'np.allclose', (['q.q[2]', '(0.12767944)'], {}), '(q.q[2], 0.12767944)\n', (3168, 3188), True, 'import numpy as np\n'), ((3200, 3231), 'numpy.allclose', 'np.allclose', (['q.q[3]', '(0.94371436)'], {}), '(q.q[3], 0.94371436)\n', (3211, 3231), True, 'import numpy as np\n'), ((3243, 3267), 'numpy.allclose', 'np.allclose', (['q.roll0', '(30)'], {}), '(q.roll0, 30)\n', (3254, 3267), True, 'import numpy as np\n'), ((3279, 3301), 'numpy.allclose', 'np.allclose', (['q.ra0', '(10)'], {}), '(q.ra0, 10)\n', (3290, 3301), True, 'import numpy as np\n'), ((3411, 3430), 'numpy.all', 'np.all', (['(q1.q == q.q)'], {}), '(q1.q == q.q)\n', (3417, 3430), True, 'import numpy as np\n'), ((4236, 4255), 'numpy.all', 'np.all', (['(q.q == q0.q)'], {}), '(q.q == q0.q)\n', (4242, 4255), True, 'import numpy as np\n'), ((4307, 4326), 'numpy.all', 'np.all', (['(q.q == q_23)'], {}), '(q.q == q_23)\n', (4313, 4326), True, 'import numpy as np\n'), ((4338, 4375), 'numpy.all', 'np.all', (['(q.equatorial == equatorial_23)'], {}), '(q.equatorial == equatorial_23)\n', (4344, 4375), True, 'import numpy as np\n'), ((4387, 4422), 'numpy.all', 'np.all', (['(q.transform == transform_23)'], {}), '(q.transform == transform_23)\n', (4393, 4422), True, 'import numpy as np\n'), ((5738, 5770), 'numpy.allclose', 'np.allclose', (['q.q[0]', '(-0.26853582)'], {}), '(q.q[0], -0.26853582)\n', (5749, 5770), True, 'import 
numpy as np\n'), ((5782, 5813), 'numpy.allclose', 'np.allclose', (['q.q[1]', '(0.14487813)'], {}), '(q.q[1], 0.14487813)\n', (5793, 5813), True, 'import numpy as np\n'), ((5825, 5857), 'numpy.allclose', 'np.allclose', (['q.q[2]', '(-0.12767944)'], {}), '(q.q[2], -0.12767944)\n', (5836, 5857), True, 'import numpy as np\n'), ((5869, 5900), 'numpy.allclose', 'np.allclose', (['q.q[3]', '(0.94371436)'], {}), '(q.q[3], 0.94371436)\n', (5880, 5900), True, 'import numpy as np\n'), ((5940, 5964), 'numpy.allclose', 'np.allclose', (['q.roll0', '(30)'], {}), '(q.roll0, 30)\n', (5951, 5964), True, 'import numpy as np\n'), ((5976, 5998), 'numpy.allclose', 'np.allclose', (['q.ra0', '(10)'], {}), '(q.ra0, 10)\n', (5987, 5998), True, 'import numpy as np\n'), ((6049, 6068), 'numpy.all', 'np.all', (['(q1.q == q.q)'], {}), '(q1.q == q.q)\n', (6055, 6068), True, 'import numpy as np\n'), ((6722, 6744), 'numpy.allclose', 'np.allclose', (['q.q', 'q_23'], {}), '(q.q, q_23)\n', (6733, 6744), True, 'import numpy as np\n'), ((6843, 6865), 'numpy.array', 'np.array', (['q.equatorial'], {}), '(q.equatorial)\n', (6851, 6865), True, 'import numpy as np\n'), ((6920, 6943), 'numpy.array', 'np.array', (['equatorial_23'], {}), '(equatorial_23)\n', (6928, 6943), True, 'import numpy as np\n'), ((7000, 7022), 'numpy.allclose', 'np.allclose', (['eq', 'eq_23'], {}), '(eq, eq_23)\n', (7011, 7022), True, 'import numpy as np\n'), ((7034, 7072), 'numpy.allclose', 'np.allclose', (['q.transform', 'transform_23'], {}), '(q.transform, transform_23)\n', (7045, 7072), True, 'import numpy as np\n'), ((7343, 7365), 'numpy.zeros', 'np.zeros', (['(4, 5, 3, 3)'], {}), '((4, 5, 3, 3))\n', (7351, 7365), True, 'import numpy as np\n'), ((7451, 7475), 'numpy.allclose', 'np.allclose', (['q.roll0', '(30)'], {}), '(q.roll0, 30)\n', (7462, 7475), True, 'import numpy as np\n'), ((7487, 7509), 'numpy.allclose', 'np.allclose', (['q.ra0', '(10)'], {}), '(q.ra0, 10)\n', (7498, 7509), True, 'import numpy as np\n'), ((7782, 7811), 'numpy.allclose', 'np.allclose', (['q.q', '(q_23 * flip)'], {}), '(q.q, q_23 * flip)\n', (7793, 7811), True, 'import numpy as np\n'), ((7910, 7932), 'numpy.array', 'np.array', (['q.equatorial'], {}), '(q.equatorial)\n', (7918, 7932), True, 'import numpy as np\n'), ((7987, 8010), 'numpy.array', 'np.array', (['equatorial_23'], {}), '(equatorial_23)\n', (7995, 8010), True, 'import numpy as np\n'), ((8067, 8097), 'numpy.allclose', 'np.allclose', (['eq', 'eq_23'], {'rtol': '(0)'}), '(eq, eq_23, rtol=0)\n', (8078, 8097), True, 'import numpy as np\n'), ((8109, 8155), 'numpy.allclose', 'np.allclose', (['q.transform', 'transform_23'], {'rtol': '(0)'}), '(q.transform, transform_23, rtol=0)\n', (8120, 8155), True, 'import numpy as np\n'), ((8378, 8393), 'numpy.dot', 'np.dot', (['t', 'tinv'], {}), '(t, tinv)\n', (8384, 8393), True, 'import numpy as np\n'), ((8640, 8655), 'numpy.dot', 'np.dot', (['t', 'tinv'], {}), '(t, tinv)\n', (8646, 8655), True, 'import numpy as np\n'), ((9176, 9215), 'numpy.allclose', 'np.allclose', (['dq.equatorial', '(0, 0.1, 1)'], {}), '(dq.equatorial, (0, 0.1, 1))\n', (9187, 9215), True, 'import numpy as np\n'), ((9287, 9326), 'numpy.allclose', 'np.allclose', (['dq.equatorial', '(0, 0.1, 1)'], {}), '(dq.equatorial, (0, 0.1, 1))\n', (9298, 9326), True, 'import numpy as np\n'), ((9648, 9669), 'numpy.all', 'np.all', (['(dq2.q == dq.q)'], {}), '(dq2.q == dq.q)\n', (9654, 9669), True, 'import numpy as np\n'), ((11066, 11085), 'numpy.all', 'np.all', (['(q.q == q_23)'], {}), '(q.q == q_23)\n', (11072, 11085), True, 'import numpy 
as np\n'), ((11097, 11134), 'numpy.all', 'np.all', (['(q.equatorial == equatorial_23)'], {}), '(q.equatorial == equatorial_23)\n', (11103, 11134), True, 'import numpy as np\n'), ((11146, 11181), 'numpy.all', 'np.all', (['(q.transform == transform_23)'], {}), '(q.transform == transform_23)\n', (11152, 11181), True, 'import numpy as np\n'), ((11215, 11234), 'numpy.all', 'np.all', (['(q.q == q_23)'], {}), '(q.q == q_23)\n', (11221, 11234), True, 'import numpy as np\n'), ((11246, 11297), 'numpy.allclose', 'np.allclose', (['q.equatorial', 'eq_23'], {'rtol': '(0)', 'atol': 'atol'}), '(q.equatorial, eq_23, rtol=0, atol=atol)\n', (11257, 11297), True, 'import numpy as np\n'), ((11309, 11366), 'numpy.allclose', 'np.allclose', (['q.transform', 'transform_23'], {'rtol': '(0)', 'atol': 'atol'}), '(q.transform, transform_23, rtol=0, atol=atol)\n', (11320, 11366), True, 'import numpy as np\n'), ((11416, 11457), 'numpy.allclose', 'np.allclose', (['q.q', 'q_23'], {'rtol': '(0)', 'atol': 'atol'}), '(q.q, q_23, rtol=0, atol=atol)\n', (11427, 11457), True, 'import numpy as np\n'), ((11469, 11520), 'numpy.allclose', 'np.allclose', (['q.equatorial', 'eq_23'], {'rtol': '(0)', 'atol': 'atol'}), '(q.equatorial, eq_23, rtol=0, atol=atol)\n', (11480, 11520), True, 'import numpy as np\n'), ((11532, 11567), 'numpy.all', 'np.all', (['(q.transform == transform_23)'], {}), '(q.transform == transform_23)\n', (11538, 11567), True, 'import numpy as np\n'), ((11637, 11659), 'numpy.allclose', 'np.allclose', (['q.ra', '(359)'], {}), '(q.ra, 359)\n', (11648, 11659), True, 'import numpy as np\n'), ((11671, 11693), 'numpy.allclose', 'np.allclose', (['q.ra0', '(-1)'], {}), '(q.ra0, -1)\n', (11682, 11693), True, 'import numpy as np\n'), ((11705, 11729), 'numpy.allclose', 'np.allclose', (['q.roll', '(358)'], {}), '(q.roll, 358)\n', (11716, 11729), True, 'import numpy as np\n'), ((11741, 11765), 'numpy.allclose', 'np.allclose', (['q.roll0', '(-2)'], {}), '(q.roll0, -2)\n', (11752, 11765), True, 'import numpy as np\n'), ((13026, 13050), 'numpy.all', 'np.all', (['(q12d.q == q12m.q)'], {}), '(q12d.q == q12m.q)\n', (13032, 13050), True, 'import numpy as np\n'), ((13595, 13615), 'numpy.array', 'np.array', (['q_23[0, 0]'], {}), '(q_23[0, 0])\n', (13603, 13615), True, 'import numpy as np\n'), ((13705, 13727), 'numpy.array', 'np.array', (['transform_23'], {}), '(transform_23)\n', (13713, 13727), True, 'import numpy as np\n'), ((13842, 13864), 'numpy.array', 'np.array', (['[10, 90, 30]'], {}), '([10, 90, 30])\n', (13850, 13864), True, 'import numpy as np\n'), ((17715, 17764), 'numpy.allclose', 'np.allclose', (['q.equatorial', '[[0, 1, 2], [3, 4, 5]]'], {}), '(q.equatorial, [[0, 1, 2], [3, 4, 5]])\n', (17726, 17764), True, 'import numpy as np\n'), ((17871, 17890), 'numpy.all', 'np.all', (['(q.q == q2.q)'], {}), '(q.q == q2.q)\n', (17877, 17890), True, 'import numpy as np\n'), ((18032, 18071), 'numpy.allclose', 'np.allclose', (['q.equatorial', '[10, 20, 30]'], {}), '(q.equatorial, [10, 20, 30])\n', (18043, 18071), True, 'import numpy as np\n'), ((18172, 18223), 'numpy.allclose', 'np.allclose', (['q.equatorial', '[[[0, 1, 2], [3, 4, 5]]]'], {}), '(q.equatorial, [[[0, 1, 2], [3, 4, 5]]])\n', (18183, 18223), True, 'import numpy as np\n'), ((18362, 18413), 'numpy.allclose', 'np.allclose', (['q.equatorial', '[[[0, 1, 2], [3, 4, 5]]]'], {}), '(q.equatorial, [[[0, 1, 2], [3, 4, 5]]])\n', (18373, 18413), True, 'import numpy as np\n'), ((18553, 18606), 'numpy.allclose', 'np.allclose', (['q.equatorial', '[[0, 1, 2], [180, 0, 180]]'], {}), 
'(q.equatorial, [[0, 1, 2], [180, 0, 180]])\n', (18564, 18606), True, 'import numpy as np\n'), ((18954, 18978), 'numpy.allclose', 'np.allclose', (['q.q', 'q_list'], {}), '(q.q, q_list)\n', (18965, 18978), True, 'import numpy as np\n'), ((19377, 19442), 'numpy.allclose', 'np.allclose', (['q.q', '[0.2358142, -0.38155539, 0.4698775, 0.76027777]'], {}), '(q.q, [0.2358142, -0.38155539, 0.4698775, 0.76027777])\n', (19388, 19442), True, 'import numpy as np\n'), ((19508, 19568), 'numpy.allclose', 'np.allclose', (['q.q', '[0.0, -0.50362718, 0.33575146, 0.79600918]'], {}), '(q.q, [0.0, -0.50362718, 0.33575146, 0.79600918])\n', (19519, 19568), True, 'import numpy as np\n'), ((19631, 19699), 'numpy.allclose', 'np.allclose', (['q.q', '[-0.16269544, -0.56161937, 0.22572786, 0.77920525]'], {}), '(q.q, [-0.16269544, -0.56161937, 0.22572786, 0.77920525])\n', (19642, 19699), True, 'import numpy as np\n'), ((20538, 20583), 'numpy.allclose', 'np.allclose', (['q2.equatorial', '[10 + 25, 20, 30]'], {}), '(q2.equatorial, [10 + 25, 20, 30])\n', (20549, 20583), True, 'import numpy as np\n'), ((20642, 20691), 'numpy.allclose', 'np.allclose', (['q2.equatorial', '[350.0, -20.0, 210.0]'], {}), '(q2.equatorial, [350.0, -20.0, 210.0])\n', (20653, 20691), True, 'import numpy as np\n'), ((345, 371), 'numpy.where', 'np.where', (['(x > xmax)', '(360)', '(0)'], {}), '(x > xmax, 360, 0)\n', (353, 371), True, 'import numpy as np\n'), ((413, 439), 'numpy.where', 'np.where', (['(x < xmin)', '(360)', '(0)'], {}), '(x < xmin, 360, 0)\n', (421, 439), True, 'import numpy as np\n'), ((1176, 1205), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1189, 1205), False, 'import pytest\n'), ((1269, 1293), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1282, 1293), False, 'import pytest\n'), ((1370, 1394), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1383, 1394), False, 'import pytest\n'), ((1480, 1504), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1493, 1504), False, 'import pytest\n'), ((1589, 1613), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1602, 1613), False, 'import pytest\n'), ((1688, 1712), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1701, 1712), False, 'import pytest\n'), ((1787, 1811), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1800, 1811), False, 'import pytest\n'), ((1884, 1908), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1897, 1908), False, 'import pytest\n'), ((1972, 1996), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1985, 1996), False, 'import pytest\n'), ((2068, 2093), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2081, 2093), False, 'import pytest\n'), ((2186, 2211), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2199, 2211), False, 'import pytest\n'), ((2300, 2325), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2313, 2325), False, 'import pytest\n'), ((2427, 2452), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2440, 2452), False, 'import pytest\n'), ((2576, 2601), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2589, 2601), False, 'import pytest\n'), ((2673, 2698), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2686, 2698), False, 'import pytest\n'), ((8479, 8498), 'numpy.allclose', 'np.allclose', 
(['v1', 'v2'], {}), '(v1, v2)\n', (8490, 8498), True, 'import numpy as np\n'), ((8741, 8760), 'numpy.allclose', 'np.allclose', (['v1', 'v2'], {}), '(v1, v2)\n', (8752, 8760), True, 'import numpy as np\n'), ((10344, 10368), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (10357, 10368), False, 'import pytest\n'), ((10399, 10423), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (10412, 10423), False, 'import pytest\n'), ((10463, 10487), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (10476, 10487), False, 'import pytest\n'), ((13480, 13494), 'numpy.sum', 'np.sum', (['(b ** 2)'], {}), '(b ** 2)\n', (13486, 13494), True, 'import numpy as np\n'), ((13784, 13809), 'numpy.all', 'np.all', (['(q1.transform == t)'], {}), '(q1.transform == t)\n', (13790, 13809), True, 'import numpy as np\n'), ((13924, 13951), 'numpy.all', 'np.all', (['(q1.equatorial == eq)'], {}), '(q1.equatorial == eq)\n', (13930, 13951), True, 'import numpy as np\n'), ((16937, 16962), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (16952, 16962), False, 'import os\n'), ((17055, 17069), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (17066, 17069), False, 'import pickle\n'), ((18695, 18768), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Float input must be a Nx3 or Nx4 array"""'}), "(ValueError, match='Float input must be a Nx3 or Nx4 array')\n", (18708, 18768), False, 'import pytest\n'), ((19005, 19077), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Unable to initialize Quat from \'blah\'"""'}), '(ValueError, match="Unable to initialize Quat from \'blah\'")\n', (19018, 19077), False, 'import pytest\n'), ((19826, 19852), 'numpy.random.random', 'np.random.random', (['(100, 3)'], {}), '((100, 3))\n', (19842, 19852), True, 'import numpy as np\n'), ((19987, 20019), 'numpy.dot', 'np.dot', (['q.transform', '[1.0, 0, 0]'], {}), '(q.transform, [1.0, 0, 0])\n', (19993, 20019), True, 'import numpy as np\n'), ((20035, 20057), 'numpy.allclose', 'np.allclose', (['vec', 'vec1'], {}), '(vec, vec1)\n', (20046, 20057), True, 'import numpy as np\n'), ((20311, 20367), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""method must be one of"""'}), "(ValueError, match='method must be one of')\n", (20324, 20367), False, 'import pytest\n'), ((20820, 20884), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""vec must be a single 3-vector"""'}), "(ValueError, match='vec must be a single 3-vector')\n", (20833, 20884), False, 'import pytest\n'), ((20952, 21009), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""alpha must be a scalar"""'}), "(ValueError, match='alpha must be a scalar')\n", (20965, 21009), False, 'import pytest\n'), ((21070, 21132), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""quaternion must be a scalar"""'}), "(ValueError, match='quaternion must be a scalar')\n", (21083, 21132), False, 'import pytest\n'), ((1128, 1139), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (1136, 1139), True, 'import numpy as np\n'), ((1632, 1646), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (1640, 1646), True, 'import numpy as np\n'), ((1731, 1745), 'numpy.zeros', 'np.zeros', (['(5,)'], {}), '((5,))\n', (1739, 1745), True, 'import numpy as np\n'), ((4504, 4519), 'numpy.array', 'np.array', (['q.ra0'], {}), '(q.ra0)\n', (4512, 4519), True, 'import numpy as np\n'), ((4543, 4560), 'numpy.array', 'np.array', (['q.roll0'], {}), '(q.roll0)\n', (4551, 
4560), True, 'import numpy as np\n'), ((4584, 4598), 'numpy.array', 'np.array', (['q.ra'], {}), '(q.ra)\n', (4592, 4598), True, 'import numpy as np\n'), ((4622, 4637), 'numpy.array', 'np.array', (['q.dec'], {}), '(q.dec)\n', (4630, 4637), True, 'import numpy as np\n'), ((4661, 4677), 'numpy.array', 'np.array', (['q.roll'], {}), '(q.roll)\n', (4669, 4677), True, 'import numpy as np\n'), ((4701, 4716), 'numpy.array', 'np.array', (['q.yaw'], {}), '(q.yaw)\n', (4709, 4716), True, 'import numpy as np\n'), ((4740, 4757), 'numpy.array', 'np.array', (['q.pitch'], {}), '(q.pitch)\n', (4748, 4757), True, 'import numpy as np\n'), ((7729, 7751), 'numpy.sign', 'np.sign', (['q_23[..., -1]'], {}), '(q_23[..., -1])\n', (7736, 7751), True, 'import numpy as np\n'), ((15247, 15279), 'numpy.allclose', 'np.allclose', (['qq.q', 'q20.q[ii, jj]'], {}), '(qq.q, q20.q[ii, jj])\n', (15258, 15279), True, 'import numpy as np\n'), ((15299, 15332), 'numpy.allclose', 'np.allclose', (['dq.q', 'dq20.q[ii, jj]'], {}), '(dq.q, dq20.q[ii, jj])\n', (15310, 15332), True, 'import numpy as np\n'), ((15610, 15642), 'numpy.allclose', 'np.allclose', (['qq.q', 'q21.q[ii, jj]'], {}), '(qq.q, q21.q[ii, jj])\n', (15621, 15642), True, 'import numpy as np\n'), ((15662, 15695), 'numpy.allclose', 'np.allclose', (['dq.q', 'dq21.q[ii, jj]'], {}), '(dq.q, dq21.q[ii, jj])\n', (15673, 15695), True, 'import numpy as np\n'), ((17118, 17184), 'numpy.isclose', 'np.isclose', (['q.q', '[0.26853582, -0.14487813, 0.12767944, 0.94371436]'], {}), '(q.q, [0.26853582, -0.14487813, 0.12767944, 0.94371436])\n', (17128, 17184), True, 'import numpy as np\n'), ((17208, 17252), 'numpy.isclose', 'np.isclose', (['q.equatorial', '[10.0, 20.0, 30.0]'], {}), '(q.equatorial, [10.0, 20.0, 30.0])\n', (17218, 17252), True, 'import numpy as np\n'), ((17273, 17425), 'numpy.isclose', 'np.isclose', (['q.transform', '[[0.92541658, -0.31879578, -0.20487413], [0.16317591, 0.82317294, -\n 0.54383814], [0.34202014, 0.46984631, 0.81379768]]'], {}), '(q.transform, [[0.92541658, -0.31879578, -0.20487413], [\n 0.16317591, 0.82317294, -0.54383814], [0.34202014, 0.46984631, 0.81379768]]\n )\n', (17283, 17425), True, 'import numpy as np\n'), ((20108, 20131), 'numpy.isclose', 'np.isclose', (['q.roll', '(0.0)'], {}), '(q.roll, 0.0)\n', (20118, 20131), True, 'import numpy as np\n'), ((1314, 1328), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (1322, 1328), True, 'import numpy as np\n'), ((1424, 1438), 'numpy.zeros', 'np.zeros', (['(4,)'], {}), '((4,))\n', (1432, 1438), True, 'import numpy as np\n'), ((1533, 1547), 'numpy.zeros', 'np.zeros', (['(4,)'], {}), '((4,))\n', (1541, 1547), True, 'import numpy as np\n'), ((2114, 2125), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (2122, 2125), True, 'import numpy as np\n'), ((2137, 2153), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (2145, 2153), True, 'import numpy as np\n'), ((2232, 2243), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (2240, 2243), True, 'import numpy as np\n'), ((2256, 2267), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2264, 2267), True, 'import numpy as np\n'), ((2355, 2366), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2363, 2366), True, 'import numpy as np\n'), ((2378, 2394), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (2386, 2394), True, 'import numpy as np\n'), ((2502, 2513), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (2510, 2513), True, 'import numpy as np\n'), ((2525, 2541), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (2533, 2541), True, 
'import numpy as np\n'), ((2554, 2565), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2562, 2565), True, 'import numpy as np\n'), ((19908, 19924), 'numpy.sum', 'np.sum', (['(vec ** 2)'], {}), '(vec ** 2)\n', (19914, 19924), True, 'import numpy as np\n'), ((20184, 20216), 'numpy.dot', 'np.dot', (['q.transform', '[0, 0, 1.0]'], {}), '(q.transform, [0, 0, 1.0])\n', (20190, 20216), True, 'import numpy as np\n'), ((20236, 20260), 'numpy.isclose', 'np.isclose', (['vec1[1]', '(0.0)'], {}), '(vec1[1], 0.0)\n', (20246, 20260), True, 'import numpy as np\n'), ((14855, 14868), 'numpy.arange', 'np.arange', (['(18)'], {}), '(18)\n', (14864, 14868), True, 'import numpy as np\n')]
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from scipy.misc import imresize
def gauss2D(shape=(3, 3),sigma=0.5):
m, n = [(ss-1.)/2. for ss in shape]
y, x = np.ogrid[-m:m+1,-n:n+1]
h = np.exp(-(x*x + y*y) / (2.*sigma*sigma))
h[ h < np.finfo(h.dtype).eps*h.max() ] = 0
sumh = h.sum()
if sumh != 0:
h /= sumh
return h
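# Illustrative aside (not part of the original script): because the kernel
# above is an isotropic Gaussian it is separable, so an equivalent (normalized)
# kernel can be built as the outer product of two 1-D Gaussians. This sketch
# skips the eps-threshold step of gauss2D.
def gauss2D_separable(shape=(3, 3), sigma=0.5):
    m, n = [(ss - 1.) / 2. for ss in shape]
    gy = np.exp(-np.arange(-m, m + 1) ** 2 / (2. * sigma * sigma))
    gx = np.exp(-np.arange(-n, n + 1) ** 2 / (2. * sigma * sigma))
    h = np.outer(gy, gx)
    return h / h.sum() if h.sum() != 0 else h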
def normalize(img):
''' Function to normalize an input array to 0-1 '''
img_min = img.min()
img_max = img.max()
return (img - img_min) / (img_max - img_min)
def my_imfilter(image, imfilter):
output = np.zeros_like(image)
pad_x = (imfilter.shape[0] - 1) // 2
pad_y = (imfilter.shape[1] - 1) // 2
for ch in range(image.shape[2]):
image_pad = np.lib.pad(image[:, :, ch], ((pad_x, pad_x), (pad_y, pad_y)), 'constant', constant_values=(0, 0))
for i in range(output.shape[0]):
for j in range(output.shape[1]):
                # element-wise multiply and sum, i.e. cross-correlation (equal to convolution here since the Gaussian filter is symmetric)
output[i, j, ch] = np.sum(
np.multiply(image_pad[i:i + imfilter.shape[0], j:j + imfilter.shape[1]], imfilter))
return output
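# Optional cross-check (illustrative only, assuming an odd-sized filter): the
# padded double loop in my_imfilter is plain correlation with zero padding, so
# its output should agree with scipy.ndimage.correlate applied per channel.
def imfilter_reference(image, imfilter):
    from scipy.ndimage import correlate
    out = np.zeros_like(image)
    for ch in range(image.shape[2]):
        out[:, :, ch] = correlate(image[:, :, ch], imfilter,
                                  mode='constant', cval=0.0)
    return out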
def vis_hybrid_image(hybrid_image):
scales = 5 # how many downsampled versions to create
scale_factor = 0.5 # how much to downsample each time
padding = 5 # how many pixels to pad.
original_height = hybrid_image.shape[0]
num_colors = hybrid_image.shape[2] # counting how many color channels the input has
output = hybrid_image[:]
cur_image = hybrid_image[:]
for i in range(1, scales):
# add padding
output = np.concatenate((output, np.ones((original_height, padding, num_colors))), axis=1)
        # downsample the image
cur_image = imresize(cur_image, scale_factor, 'bilinear').astype(np.float) / 255
# pad the top and append to the output
tmp = np.concatenate(
(np.ones((original_height - cur_image.shape[0], cur_image.shape[1], num_colors)), cur_image), axis=0)
output = np.concatenate((output, tmp), axis=1)
return output
def main():
image1 = mpimg.imread('marilyn.bmp')
image2 = mpimg.imread('einstein.bmp')
image1 = image1.astype(np.float32)/255
image2 = image2.astype(np.float32)/255
cutoff_frequency = 3
gaussian_filter = gauss2D(shape=(cutoff_frequency*4+1,cutoff_frequency*4+1), sigma = cutoff_frequency)
low_frequencies = my_imfilter(image1, gaussian_filter)
high_frequencies = image2 - my_imfilter(image2, gaussian_filter)
hybrid_image = low_frequencies + high_frequencies
plt.figure(1)
plt.imshow(low_frequencies)
plt.figure(2)
plt.imshow(high_frequencies+0.5)
vis = vis_hybrid_image(hybrid_image)
plt.figure(3)
plt.imshow(vis)
plt.show()
if __name__ == '__main__':
main()
|
[
"matplotlib.image.imread",
"numpy.zeros_like",
"matplotlib.pyplot.show",
"numpy.multiply",
"matplotlib.pyplot.imshow",
"numpy.ones",
"numpy.finfo",
"matplotlib.pyplot.figure",
"numpy.exp",
"scipy.misc.imresize",
"numpy.lib.pad",
"numpy.concatenate"
] |
[((300, 348), 'numpy.exp', 'np.exp', (['(-(x * x + y * y) / (2.0 * sigma * sigma))'], {}), '(-(x * x + y * y) / (2.0 * sigma * sigma))\n', (306, 348), True, 'import numpy as np\n'), ((676, 696), 'numpy.zeros_like', 'np.zeros_like', (['image'], {}), '(image)\n', (689, 696), True, 'import numpy as np\n'), ((2207, 2234), 'matplotlib.image.imread', 'mpimg.imread', (['"""marilyn.bmp"""'], {}), "('marilyn.bmp')\n", (2219, 2234), True, 'import matplotlib.image as mpimg\n'), ((2248, 2276), 'matplotlib.image.imread', 'mpimg.imread', (['"""einstein.bmp"""'], {}), "('einstein.bmp')\n", (2260, 2276), True, 'import matplotlib.image as mpimg\n'), ((2683, 2696), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (2693, 2696), True, 'import matplotlib.pyplot as plt\n'), ((2701, 2728), 'matplotlib.pyplot.imshow', 'plt.imshow', (['low_frequencies'], {}), '(low_frequencies)\n', (2711, 2728), True, 'import matplotlib.pyplot as plt\n'), ((2733, 2746), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (2743, 2746), True, 'import matplotlib.pyplot as plt\n'), ((2751, 2785), 'matplotlib.pyplot.imshow', 'plt.imshow', (['(high_frequencies + 0.5)'], {}), '(high_frequencies + 0.5)\n', (2761, 2785), True, 'import matplotlib.pyplot as plt\n'), ((2829, 2842), 'matplotlib.pyplot.figure', 'plt.figure', (['(3)'], {}), '(3)\n', (2839, 2842), True, 'import matplotlib.pyplot as plt\n'), ((2847, 2862), 'matplotlib.pyplot.imshow', 'plt.imshow', (['vis'], {}), '(vis)\n', (2857, 2862), True, 'import matplotlib.pyplot as plt\n'), ((2868, 2878), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2876, 2878), True, 'import matplotlib.pyplot as plt\n'), ((836, 937), 'numpy.lib.pad', 'np.lib.pad', (['image[:, :, ch]', '((pad_x, pad_x), (pad_y, pad_y))', '"""constant"""'], {'constant_values': '(0, 0)'}), "(image[:, :, ch], ((pad_x, pad_x), (pad_y, pad_y)), 'constant',\n constant_values=(0, 0))\n", (846, 937), True, 'import numpy as np\n'), ((2125, 2162), 'numpy.concatenate', 'np.concatenate', (['(output, tmp)'], {'axis': '(1)'}), '((output, tmp), axis=1)\n', (2139, 2162), True, 'import numpy as np\n'), ((1742, 1789), 'numpy.ones', 'np.ones', (['(original_height, padding, num_colors)'], {}), '((original_height, padding, num_colors))\n', (1749, 1789), True, 'import numpy as np\n'), ((2007, 2086), 'numpy.ones', 'np.ones', (['(original_height - cur_image.shape[0], cur_image.shape[1], num_colors)'], {}), '((original_height - cur_image.shape[0], cur_image.shape[1], num_colors))\n', (2014, 2086), True, 'import numpy as np\n'), ((351, 368), 'numpy.finfo', 'np.finfo', (['h.dtype'], {}), '(h.dtype)\n', (359, 368), True, 'import numpy as np\n'), ((1151, 1237), 'numpy.multiply', 'np.multiply', (['image_pad[i:i + imfilter.shape[0], j:j + imfilter.shape[1]]', 'imfilter'], {}), '(image_pad[i:i + imfilter.shape[0], j:j + imfilter.shape[1]],\n imfilter)\n', (1162, 1237), True, 'import numpy as np\n'), ((1848, 1893), 'scipy.misc.imresize', 'imresize', (['cur_image', 'scale_factor', '"""bilinear"""'], {}), "(cur_image, scale_factor, 'bilinear')\n", (1856, 1893), False, 'from scipy.misc import imresize\n')]
|
# -*- coding: utf-8 -*-
"""ORCHSET Dataset Loader
Orchset is intended to be used as a dataset for the development and
evaluation of melody extraction algorithms. This collection contains
64 audio excerpts focused on symphonic music with their corresponding
annotation of the melody.
For more details, please visit: https://zenodo.org/record/1289786#.XREpzaeZPx6
"""
import csv
import glob
import logging
import os
import shutil
import librosa
import numpy as np
from mirdata import download_utils
from mirdata import jams_utils
from mirdata import core
from mirdata import utils
BIBTEX = """@article{bosch2016evaluation,
title={Evaluation and combination of pitch estimation methods for melody extraction in symphonic classical music},
author={<NAME> and <NAME> and <NAME>},
journal={Journal of New Music Research},
volume={45},
number={2},
pages={101--117},
year={2016},
publisher={Taylor \\& Francis}
}"""
REMOTES = {
"all": download_utils.RemoteFileMetadata(
filename="Orchset_dataset_0.zip",
url="https://zenodo.org/record/1289786/files/Orchset_dataset_0.zip?download=1",
checksum="cf6fe52d64624f61ee116c752fb318ca",
destination_dir=None,
)
}
def _load_metadata(data_home):
predominant_inst_path = os.path.join(
data_home, "Orchset - Predominant Melodic Instruments.csv"
)
if not os.path.exists(predominant_inst_path):
logging.info("Metadata file {} not found.".format(predominant_inst_path))
return None
with open(predominant_inst_path, "r") as fhandle:
reader = csv.reader(fhandle, delimiter=",")
raw_data = []
for line in reader:
if line[0] == "excerpt":
continue
raw_data.append(line)
tf_dict = {"TRUE": True, "FALSE": False}
metadata_index = {}
for line in raw_data:
track_id = line[0].split(".")[0]
id_split = track_id.split(".")[0].split("-")
if id_split[0] == "Musorgski" or id_split[0] == "Rimski":
id_split[0] = "-".join(id_split[:2])
id_split.pop(1)
melodic_instruments = [s.split(",") for s in line[1].split("+")]
melodic_instruments = [
item.lower() for sublist in melodic_instruments for item in sublist
]
for i, inst in enumerate(melodic_instruments):
if inst == "string":
melodic_instruments[i] = "strings"
elif inst == "winds (solo)":
melodic_instruments[i] = "winds"
melodic_instruments = sorted(list(set(melodic_instruments)))
metadata_index[track_id] = {
"predominant_melodic_instruments-raw": line[1],
"predominant_melodic_instruments-normalized": melodic_instruments,
"alternating_melody": tf_dict[line[2]],
"contains_winds": tf_dict[line[3]],
"contains_strings": tf_dict[line[4]],
"contains_brass": tf_dict[line[5]],
"only_strings": tf_dict[line[6]],
"only_winds": tf_dict[line[7]],
"only_brass": tf_dict[line[8]],
"composer": id_split[0],
"work": "-".join(id_split[1:-1]),
"excerpt": id_split[-1][2:],
}
metadata_index["data_home"] = data_home
return metadata_index
DATA = utils.LargeData("orchset_index.json", _load_metadata)
class Track(core.Track):
"""orchset Track class
Args:
track_id (str): track id of the track
Attributes:
alternating_melody (bool): True if the melody alternates between instruments
audio_path_mono (str): path to the mono audio file
audio_path_stereo (str): path to the stereo audio file
composer (str): the work's composer
contains_brass (bool): True if the track contains any brass instrument
contains_strings (bool): True if the track contains any string instrument
contains_winds (bool): True if the track contains any wind instrument
        excerpt (str): excerpt identifier within the work
melody_path (str): path to the melody annotation file
only_brass (bool): True if the track contains brass instruments only
only_strings (bool): True if the track contains string instruments only
only_winds (bool): True if the track contains wind instruments only
predominant_melodic_instruments (list): List of instruments which play the melody
track_id (str): track id
work (str): The musical work
"""
def __init__(self, track_id, data_home):
if track_id not in DATA.index['tracks']:
raise ValueError("{} is not a valid track ID in orchset".format(track_id))
self.track_id = track_id
self._data_home = data_home
self._track_paths = DATA.index['tracks'][track_id]
self.melody_path = os.path.join(self._data_home, self._track_paths["melody"][0])
metadata = DATA.metadata(data_home)
if metadata is not None and track_id in metadata:
self._track_metadata = metadata[track_id]
else:
self._track_metadata = {
"predominant_melodic_instruments-raw": None,
"predominant_melodic_instruments-normalized": None,
"alternating_melody": None,
"contains_winds": None,
"contains_strings": None,
"contains_brass": None,
"only_strings": None,
"only_winds": None,
"only_brass": None,
"composer": None,
"work": None,
"excerpt": None,
}
self.audio_path_mono = os.path.join(
self._data_home, self._track_paths["audio_mono"][0]
)
self.audio_path_stereo = os.path.join(
self._data_home, self._track_paths["audio_stereo"][0]
)
self.composer = self._track_metadata["composer"]
self.work = self._track_metadata["work"]
self.excerpt = self._track_metadata["excerpt"]
self.predominant_melodic_instruments = self._track_metadata[
"predominant_melodic_instruments-normalized"
]
self.alternating_melody = self._track_metadata["alternating_melody"]
self.contains_winds = self._track_metadata["contains_winds"]
self.contains_strings = self._track_metadata["contains_strings"]
self.contains_brass = self._track_metadata["contains_brass"]
self.only_strings = self._track_metadata["only_strings"]
self.only_winds = self._track_metadata["only_winds"]
self.only_brass = self._track_metadata["only_brass"]
@utils.cached_property
def melody(self):
"""F0Data: melody annotation"""
return load_melody(self.melody_path)
@property
def audio_mono(self):
"""(np.ndarray, float): mono audio signal, sample rate"""
return load_audio_mono(self.audio_path_mono)
@property
def audio_stereo(self):
"""(np.ndarray, float): stereo audio signal, sample rate"""
return load_audio_stereo(self.audio_path_stereo)
def to_jams(self):
"""Jams: the track's data in jams format"""
return jams_utils.jams_converter(
audio_path=self.audio_path_mono,
f0_data=[(self.melody, "annotated melody")],
metadata=self._track_metadata,
)
def load_audio_mono(audio_path):
"""Load a Orchset audio file.
Args:
audio_path (str): path to audio file
Returns:
y (np.ndarray): the mono audio signal
sr (float): The sample rate of the audio file
"""
if not os.path.exists(audio_path):
raise IOError("audio_path {} does not exist".format(audio_path))
return librosa.load(audio_path, sr=None, mono=True)
def load_audio_stereo(audio_path):
"""Load a Orchset audio file.
Args:
audio_path (str): path to audio file
Returns:
        y (np.ndarray): the stereo audio signal
sr (float): The sample rate of the audio file
"""
if not os.path.exists(audio_path):
raise IOError("audio_path {} does not exist".format(audio_path))
return librosa.load(audio_path, sr=None, mono=False)
def _download(
save_dir, remotes, partial_download, info_message, force_overwrite, cleanup
):
"""Download the dataset.
Args:
save_dir (str):
The directory to download the data
remotes (dict or None):
A dictionary of RemoteFileMetadata tuples of data in zip format.
If None, there is no data to download
partial_download (list or None):
A list of keys to partially download the remote objects of the download dict.
If None, all data is downloaded
info_message (str or None):
A string of info to print when this function is called.
If None, no string is printed.
force_overwrite (bool):
If True, existing files are overwritten by the downloaded files.
cleanup (bool):
Whether to delete the zip/tar file after extracting.
"""
download_utils.downloader(
save_dir,
remotes=remotes,
info_message=None,
force_overwrite=force_overwrite,
cleanup=cleanup,
)
# files get downloaded to a folder called Orchset - move everything up a level
duplicated_orchset_dir = os.path.join(save_dir, "Orchset")
orchset_files = glob.glob(os.path.join(duplicated_orchset_dir, "*"))
for fpath in orchset_files:
shutil.move(fpath, save_dir)
if os.path.exists(duplicated_orchset_dir):
shutil.rmtree(duplicated_orchset_dir)
def load_melody(melody_path):
if not os.path.exists(melody_path):
raise IOError("melody_path {} does not exist".format(melody_path))
times = []
freqs = []
confidence = []
with open(melody_path, "r") as fhandle:
reader = csv.reader(fhandle, delimiter="\t")
for line in reader:
times.append(float(line[0]))
freqs.append(float(line[1]))
confidence.append(0.0 if line[1] == "0" else 1.0)
melody_data = utils.F0Data(np.array(times), np.array(freqs), np.array(confidence))
return melody_data
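# Illustrative usage sketch (not part of the loader): it assumes the Orchset
# audio and metadata have already been downloaded to ``data_home``; the track
# id is taken from the index rather than hard-coded, since no ids are listed
# in this module.
def _example_usage(data_home):
    track_id = sorted(DATA.index["tracks"].keys())[0]
    track = Track(track_id, data_home)
    y_mono, sr = track.audio_mono  # mono mix at the file's native sample rate
    melody = track.melody          # F0Data built by load_melody above
    return track_id, sr, melody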
|
[
"csv.reader",
"mirdata.download_utils.downloader",
"mirdata.jams_utils.jams_converter",
"os.path.exists",
"mirdata.download_utils.RemoteFileMetadata",
"mirdata.utils.LargeData",
"librosa.load",
"numpy.array",
"shutil.move",
"shutil.rmtree",
"os.path.join"
] |
[((3333, 3386), 'mirdata.utils.LargeData', 'utils.LargeData', (['"""orchset_index.json"""', '_load_metadata'], {}), "('orchset_index.json', _load_metadata)\n", (3348, 3386), False, 'from mirdata import utils\n'), ((969, 1192), 'mirdata.download_utils.RemoteFileMetadata', 'download_utils.RemoteFileMetadata', ([], {'filename': '"""Orchset_dataset_0.zip"""', 'url': '"""https://zenodo.org/record/1289786/files/Orchset_dataset_0.zip?download=1"""', 'checksum': '"""cf6fe52d64624f61ee116c752fb318ca"""', 'destination_dir': 'None'}), "(filename='Orchset_dataset_0.zip', url=\n 'https://zenodo.org/record/1289786/files/Orchset_dataset_0.zip?download=1',\n checksum='cf6fe52d64624f61ee116c752fb318ca', destination_dir=None)\n", (1002, 1192), False, 'from mirdata import download_utils\n'), ((1287, 1359), 'os.path.join', 'os.path.join', (['data_home', '"""Orchset - Predominant Melodic Instruments.csv"""'], {}), "(data_home, 'Orchset - Predominant Melodic Instruments.csv')\n", (1299, 1359), False, 'import os\n'), ((7772, 7816), 'librosa.load', 'librosa.load', (['audio_path'], {'sr': 'None', 'mono': '(True)'}), '(audio_path, sr=None, mono=True)\n', (7784, 7816), False, 'import librosa\n'), ((8191, 8236), 'librosa.load', 'librosa.load', (['audio_path'], {'sr': 'None', 'mono': '(False)'}), '(audio_path, sr=None, mono=False)\n', (8203, 8236), False, 'import librosa\n'), ((9140, 9265), 'mirdata.download_utils.downloader', 'download_utils.downloader', (['save_dir'], {'remotes': 'remotes', 'info_message': 'None', 'force_overwrite': 'force_overwrite', 'cleanup': 'cleanup'}), '(save_dir, remotes=remotes, info_message=None,\n force_overwrite=force_overwrite, cleanup=cleanup)\n', (9165, 9265), False, 'from mirdata import download_utils\n'), ((9421, 9454), 'os.path.join', 'os.path.join', (['save_dir', '"""Orchset"""'], {}), "(save_dir, 'Orchset')\n", (9433, 9454), False, 'import os\n'), ((9604, 9642), 'os.path.exists', 'os.path.exists', (['duplicated_orchset_dir'], {}), '(duplicated_orchset_dir)\n', (9618, 9642), False, 'import os\n'), ((1386, 1423), 'os.path.exists', 'os.path.exists', (['predominant_inst_path'], {}), '(predominant_inst_path)\n', (1400, 1423), False, 'import os\n'), ((1599, 1633), 'csv.reader', 'csv.reader', (['fhandle'], {'delimiter': '""","""'}), "(fhandle, delimiter=',')\n", (1609, 1633), False, 'import csv\n'), ((4863, 4924), 'os.path.join', 'os.path.join', (['self._data_home', "self._track_paths['melody'][0]"], {}), "(self._data_home, self._track_paths['melody'][0])\n", (4875, 4924), False, 'import os\n'), ((5681, 5746), 'os.path.join', 'os.path.join', (['self._data_home', "self._track_paths['audio_mono'][0]"], {}), "(self._data_home, self._track_paths['audio_mono'][0])\n", (5693, 5746), False, 'import os\n'), ((5802, 5869), 'os.path.join', 'os.path.join', (['self._data_home', "self._track_paths['audio_stereo'][0]"], {}), "(self._data_home, self._track_paths['audio_stereo'][0])\n", (5814, 5869), False, 'import os\n'), ((7218, 7357), 'mirdata.jams_utils.jams_converter', 'jams_utils.jams_converter', ([], {'audio_path': 'self.audio_path_mono', 'f0_data': "[(self.melody, 'annotated melody')]", 'metadata': 'self._track_metadata'}), "(audio_path=self.audio_path_mono, f0_data=[(self.\n melody, 'annotated melody')], metadata=self._track_metadata)\n", (7243, 7357), False, 'from mirdata import jams_utils\n'), ((7659, 7685), 'os.path.exists', 'os.path.exists', (['audio_path'], {}), '(audio_path)\n', (7673, 7685), False, 'import os\n'), ((8078, 8104), 'os.path.exists', 'os.path.exists', (['audio_path'], {}), 
'(audio_path)\n', (8092, 8104), False, 'import os\n'), ((9485, 9526), 'os.path.join', 'os.path.join', (['duplicated_orchset_dir', '"""*"""'], {}), "(duplicated_orchset_dir, '*')\n", (9497, 9526), False, 'import os\n'), ((9568, 9596), 'shutil.move', 'shutil.move', (['fpath', 'save_dir'], {}), '(fpath, save_dir)\n', (9579, 9596), False, 'import shutil\n'), ((9652, 9689), 'shutil.rmtree', 'shutil.rmtree', (['duplicated_orchset_dir'], {}), '(duplicated_orchset_dir)\n', (9665, 9689), False, 'import shutil\n'), ((9733, 9760), 'os.path.exists', 'os.path.exists', (['melody_path'], {}), '(melody_path)\n', (9747, 9760), False, 'import os\n'), ((9949, 9984), 'csv.reader', 'csv.reader', (['fhandle'], {'delimiter': '"""\t"""'}), "(fhandle, delimiter='\\t')\n", (9959, 9984), False, 'import csv\n'), ((10189, 10204), 'numpy.array', 'np.array', (['times'], {}), '(times)\n', (10197, 10204), True, 'import numpy as np\n'), ((10206, 10221), 'numpy.array', 'np.array', (['freqs'], {}), '(freqs)\n', (10214, 10221), True, 'import numpy as np\n'), ((10223, 10243), 'numpy.array', 'np.array', (['confidence'], {}), '(confidence)\n', (10231, 10243), True, 'import numpy as np\n')]
|
# This script attempts to determine the TDOA in the time domain using CSOM
# Variation of the microphone spacings as well as a 96 kHz sampling frequency
# Imports
import sys
import math
import numpy as np
sys.path.append("..\\..\\simulation")
sys.path.append("..\\..\\libraries")
from GeometryLibrary import calculateMicrophoneArray_2
from SimulationLibrary import load_configs, simulate
from GeometryLibrary import getPoint, estimateK_Pair, distance, getAngle_Pair, getAngle_angle1, angle_degree, KarstenDOA_calculateSteep_linear_simple
from GeometryLibrary import getIntersectionPointsCircle, KarstenDOA_calculateCurve_linear, KarstenDOA_calculateCurve_nonlinear, getMicrophonePair_DOA_Intersection_linear
from GeometryLibrary import getAngle_angle1, get_tCurve
from OptimizationLibrary import optimizeIntersectionPoint_nonLinear_numeric
from GraphicLibrary import drawPoint, drawCircle
from SignalProcessingLibrary import getSignalPower_UsingTime_AverageFree
sys.path.append("..\\..\\tdoa")
import array_parameters
import basic_tdoa
import matplotlib.pyplot as plt
sys.path.append("..\\..\\simulation")
sys.path.append("..\\..\\libraries")
from SimulationLibrary import load_configs, simulate
from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance
from OptimizationLibrary import optimizeIntersectionPoint_nonLinear_numeric
from GraphicLibrary import drawPoint, drawCircle
from SignalProcessingLibrary import butterWorthFilter, tdoa_csom, centralSecondOrderMomentFunction_part, getSignalPower_UsingTime_AverageFree, tdoa_gcc_phat, centralSecondOrderMomentFunction
def generateTime(sampling_rate, number_samples):
return np.arange(0,number_samples)/sampling_rate
def getSourceSignal(time):
source_signal = np.zeros_like(time)
for t in range(0,time.shape[0]):
source_signal[t] = signal_function(time[t])
return source_signal
def signal_function(x):
return 0.58 * math.sin(x * (2 * math.pi * 400.0)) if (x > 0.05 and x < 0.1) else 0
def convertPoint(p):
return getPoint(p[0],p[1])
def cutSignal(signal, meta_data, time_window):
time = np.arange(meta_data["number_samples"])/meta_data["sampling_rate"]
fromIndex = int(time_window["from"]/meta_data["duration"]*meta_data["number_samples"])
toIndex = int(time_window["to"]/meta_data["duration"]*meta_data["number_samples"])
return time[fromIndex:toIndex], signal[fromIndex:toIndex]
def cutSignal_sample(signal, meta_data, sample_window):
time = np.arange(meta_data["number_samples"])/meta_data["sampling_rate"]
fromIndex = sample_window["from"]
toIndex = sample_window["to"]
return time[fromIndex:toIndex], signal[fromIndex:toIndex]
def updateConfig(config, micList, microphone_noise_sigm, noise_environment, noise_source, source_pos):
config["microphone_noise_mus"] = list()
config["microphone_noise_sigmas"] = list()
config["microphone_noise_amplitudes"] = list()
config["microphone_positions"] = list()
for m in micList:
config["microphone_noise_mus"].append(0)
config["microphone_noise_sigmas"].append(microphone_noise_sigm)
config["microphone_noise_amplitudes"].append(1)
config["microphone_positions"].append(m)
config["source_position"] = source_pos
config["source_noise_sigma"] = noise_source
config["general_noise_sigma"] = noise_environment
return config
def getSNR(signal):
mx = np.max(signal)
thrFil = np.asarray([signal > mx*0.9])
fromIdx = np.where(thrFil == True)[1][0]
toIdx = thrFil.shape[1]-np.where(np.flip(thrFil) == True)[1][0]
time_window_signal = {"from" : fromIdx, "to": toIdx}
sig_cut = cutSignal_sample(signal, meta, time_window_signal)
time_window_signal = {"from" : 0, "to": 2000}
noi_cut = cutSignal_sample(signal, meta, time_window_signal)
powerSig = getSignalPower_UsingTime_AverageFree(sig_cut[1])
powerNoi = getSignalPower_UsingTime_AverageFree(noi_cut[1])
snrFac = (powerSig-powerNoi)/(powerNoi)
    snrDB = 10*np.log10(snrFac)  # decibels use log base 10, not the natural log
return powerSig, powerNoi, snrFac, snrDB
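# Worked example of the conversion above (illustrative numbers only): a
# signal-window power of 1.0 and a noise-window power of 0.01 give the factor
# (1.0 - 0.01) / 0.01 = 99, i.e. a level of 10 * log10(99) ~ 19.96 dB.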
def getFakeTDOA(tdoa, sample_freq):
fac = 1
if(tdoa<0):
fac = -1
tdoa = -tdoa
inc = 0
while (inc*1/sample_freq<tdoa):
inc += 1
return inc*1/sample_freq*fac
def getRealTDOA(source_pos, micAPos, micBPos):
return getRealDeltaS(source_pos, micAPos, micBPos) / 343.3
def getRealDeltaS(source_pos, micAPos, micBPos):
return distance(micAPos, source_pos) - distance(micBPos, source_pos)
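# Quick illustrative check (hypothetical endfire geometry, not the simulated
# setup below): two microphones 0.4 m apart on the x-axis and a source 10 m
# away along that axis give a path difference equal to the full spacing, and
# getFakeTDOA rounds the resulting delay up to the next 48 kHz sample period.
def _tdoa_sanity_check():
    mic_a = getPoint(-0.2, 0.0)
    mic_b = getPoint(0.2, 0.0)
    src = getPoint(10.0, 0.0)
    delta_s = getRealDeltaS(src, mic_a, mic_b)  # ~ +0.4 m (mic A is farther)
    tdoa = getRealTDOA(src, mic_a, mic_b)       # ~ 1.165e-3 s
    tdoa_quantized = getFakeTDOA(tdoa, 48000)     # multiple of 1/48000 s, >= tdoa
    return delta_s, tdoa, tdoa_quantized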
# Load configuration
config = load_configs("configEXP.json")[0]
N = 20
mic_dist = 0.4
num_mic = 8
xval = 78
distances = [1,5,10,20,40,60,80,100]
angles = [0, 10, 20, 30, 45, 60, 70, 80, 90]
for xval in [0]:#[0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95]:
l = list()
for dis in distances:
# print(dis)
lol = list()
for n in range(0,10):
# dis = 80
ang = 0
# Adjust Configurations
micList = list()
for i in range(0,num_mic):
micList.append(getPoint(mic_dist*np.sin(angle_radians(360/8*(2-i))),mic_dist*np.cos(angle_radians(360/8*(2-i)))))
noise_microphone = 0.003
noise_environment = 0.04
noise_source = 0.01
source_pos = getPoint(dis*np.sin(angle_radians(ang)),dis*np.cos(angle_radians(ang)))
config = updateConfig(config, micList, noise_microphone, noise_environment, noise_source, source_pos)
# Signal Simulation
loaded = simulate(config, config["source_position"], signal_function)
signals = loaded.get_measurements()
meta = loaded.get_meta_data()
signalsFiltered = list()
signalsPower = list()
signalsSNR = list()
for s in signals:
sf = butterWorthFilter(s, meta["sampling_rate"], 2000)
powerSig, powerNoi, snrFac, snrDB = getSNR(sf)
signalsFiltered.append(sf)
signalsPower.append(powerSig)
signalsSNR.append(snrDB)
# Calculate True K
K_true = 0
for k in range(0,len(micList)):
K_true += signalsPower[k]*distance(micList[k], source_pos)*distance(micList[k], source_pos)
K_true /= len(micList)
# Calculate K estimation
K_estim_exakt = list()
K_estim_noise = list()
for i in range(0,len(micList)):
for j in range(0, len(micList)):
if(i!=j):
a = getRealTDOA(source_pos, micList[i], micList[j])
b = getFakeTDOA(a, 48000)
K1_exakt, K2_exakt = estimateK_Pair(signalsPower[i], signalsPower[j], micList[i], micList[j], a*343.2)
K1_noised, K2_noised = estimateK_Pair(signalsPower[i], signalsPower[j], micList[i], micList[j], b*343.2)
K_estim_exakt.append(K1_exakt)
K_estim_exakt.append(K2_exakt)
K_estim_noise.append(K1_noised)
K_estim_noise.append(K2_noised)
# Remove NAN
K_estim_exakt = [x for x in K_estim_exakt if str(x) != 'nan']
K_estim_noise = [x for x in K_estim_noise if str(x) != 'nan']
K_estim_exakt.sort()
K_estim_noise.sort()
# K_estim = K_estim_noise[int(len(K_estim_noise)*xval)]
med = np.median(K_estim_noise)
ave = np.average(K_estim_noise)
lol.append(ave/K_true)
l.append(np.average(lol))
print(lol)
print(l)
# plt.subplot(2,1,1)
# #diff = np.insert(diff, 0, 0)
# #plt.plot(K_estim_exakt)
# plt.plot(K_estim_noise)
# plt.plot(K_estim_exakt)
# #plt.plot(butterWorthFilter(K_estim_exakt, meta["sampling_rate"], 5000))
# plt.axhline(y=K_true,color="red")
# plt.axvline(x=xval,color="red")
#
# plt.subplot(2,1,2)
# plt.plot(np.diff(K_estim_noise))
# plt.plot(np.diff(K_estim_exakt))
## Distance
#distanceReal = distance(source_pos, getPoint(0,0))
#angleReal = 90 - angle_degree(getAngle_angle1(getPoint(0,0), source_pos))
#
#import matplotlib.pyplot as plt
#
#for m in micList:
# drawPoint(m, "x", "blue", 50)
#drawPoint(source_pos, "x", "red", 50)
#
#drawCircle(micList[0], K_true/signalsPower[0], "red")
#drawCircle(micList[4], K_true/signalsPower[4], "red")
#
#plt.xlim(-40,40)
#plt.ylim(-30,30)
|
[
"numpy.arange",
"GeometryLibrary.getPoint",
"sys.path.append",
"numpy.zeros_like",
"SignalProcessingLibrary.butterWorthFilter",
"numpy.max",
"GeometryLibrary.distance",
"numpy.average",
"numpy.median",
"numpy.asarray",
"math.sin",
"GeometryLibrary.angle_radians",
"SimulationLibrary.simulate",
"numpy.flip",
"numpy.log",
"GeometryLibrary.estimateK_Pair",
"numpy.where",
"SignalProcessingLibrary.getSignalPower_UsingTime_AverageFree",
"SimulationLibrary.load_configs"
] |
[((188, 225), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\simulation"""'], {}), "('..\\\\..\\\\simulation')\n", (203, 225), False, 'import sys\n'), ((226, 262), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\libraries"""'], {}), "('..\\\\..\\\\libraries')\n", (241, 262), False, 'import sys\n'), ((946, 977), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\tdoa"""'], {}), "('..\\\\..\\\\tdoa')\n", (961, 977), False, 'import sys\n'), ((1055, 1092), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\simulation"""'], {}), "('..\\\\..\\\\simulation')\n", (1070, 1092), False, 'import sys\n'), ((1093, 1129), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\libraries"""'], {}), "('..\\\\..\\\\libraries')\n", (1108, 1129), False, 'import sys\n'), ((1729, 1748), 'numpy.zeros_like', 'np.zeros_like', (['time'], {}), '(time)\n', (1742, 1748), True, 'import numpy as np\n'), ((2009, 2029), 'GeometryLibrary.getPoint', 'getPoint', (['p[0]', 'p[1]'], {}), '(p[0], p[1])\n', (2017, 2029), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((3404, 3418), 'numpy.max', 'np.max', (['signal'], {}), '(signal)\n', (3410, 3418), True, 'import numpy as np\n'), ((3432, 3463), 'numpy.asarray', 'np.asarray', (['[signal > mx * 0.9]'], {}), '([signal > mx * 0.9])\n', (3442, 3463), True, 'import numpy as np\n'), ((3834, 3882), 'SignalProcessingLibrary.getSignalPower_UsingTime_AverageFree', 'getSignalPower_UsingTime_AverageFree', (['sig_cut[1]'], {}), '(sig_cut[1])\n', (3870, 3882), False, 'from SignalProcessingLibrary import butterWorthFilter, tdoa_csom, centralSecondOrderMomentFunction_part, getSignalPower_UsingTime_AverageFree, tdoa_gcc_phat, centralSecondOrderMomentFunction\n'), ((3898, 3946), 'SignalProcessingLibrary.getSignalPower_UsingTime_AverageFree', 'getSignalPower_UsingTime_AverageFree', (['noi_cut[1]'], {}), '(noi_cut[1])\n', (3934, 3946), False, 'from SignalProcessingLibrary import butterWorthFilter, tdoa_csom, centralSecondOrderMomentFunction_part, getSignalPower_UsingTime_AverageFree, tdoa_gcc_phat, centralSecondOrderMomentFunction\n'), ((4537, 4567), 'SimulationLibrary.load_configs', 'load_configs', (['"""configEXP.json"""'], {}), "('configEXP.json')\n", (4549, 4567), False, 'from SimulationLibrary import load_configs, simulate\n'), ((1639, 1667), 'numpy.arange', 'np.arange', (['(0)', 'number_samples'], {}), '(0, number_samples)\n', (1648, 1667), True, 'import numpy as np\n'), ((2088, 2126), 'numpy.arange', 'np.arange', (["meta_data['number_samples']"], {}), "(meta_data['number_samples'])\n", (2097, 2126), True, 'import numpy as np\n'), ((2469, 2507), 'numpy.arange', 'np.arange', (["meta_data['number_samples']"], {}), "(meta_data['number_samples'])\n", (2478, 2507), True, 'import numpy as np\n'), ((4011, 4025), 'numpy.log', 'np.log', (['snrFac'], {}), '(snrFac)\n', (4017, 4025), True, 'import numpy as np\n'), ((4444, 4473), 'GeometryLibrary.distance', 'distance', (['micAPos', 'source_pos'], {}), '(micAPos, source_pos)\n', (4452, 4473), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((4476, 4505), 'GeometryLibrary.distance', 'distance', (['micBPos', 'source_pos'], {}), '(micBPos, source_pos)\n', (4484, 4505), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((1907, 1942), 'math.sin', 'math.sin', (['(x * (2 * math.pi * 400.0))'], {}), '(x * (2 * math.pi * 400.0))\n', (1915, 1942), False, 'import math\n'), ((3476, 3500), 'numpy.where', 
'np.where', (['(thrFil == True)'], {}), '(thrFil == True)\n', (3484, 3500), True, 'import numpy as np\n'), ((5564, 5624), 'SimulationLibrary.simulate', 'simulate', (['config', "config['source_position']", 'signal_function'], {}), "(config, config['source_position'], signal_function)\n", (5572, 5624), False, 'from SimulationLibrary import load_configs, simulate\n'), ((7600, 7624), 'numpy.median', 'np.median', (['K_estim_noise'], {}), '(K_estim_noise)\n', (7609, 7624), True, 'import numpy as np\n'), ((7643, 7668), 'numpy.average', 'np.average', (['K_estim_noise'], {}), '(K_estim_noise)\n', (7653, 7668), True, 'import numpy as np\n'), ((7721, 7736), 'numpy.average', 'np.average', (['lol'], {}), '(lol)\n', (7731, 7736), True, 'import numpy as np\n'), ((5880, 5929), 'SignalProcessingLibrary.butterWorthFilter', 'butterWorthFilter', (['s', "meta['sampling_rate']", '(2000)'], {}), "(s, meta['sampling_rate'], 2000)\n", (5897, 5929), False, 'from SignalProcessingLibrary import butterWorthFilter, tdoa_csom, centralSecondOrderMomentFunction_part, getSignalPower_UsingTime_AverageFree, tdoa_gcc_phat, centralSecondOrderMomentFunction\n'), ((6309, 6341), 'GeometryLibrary.distance', 'distance', (['micList[k]', 'source_pos'], {}), '(micList[k], source_pos)\n', (6317, 6341), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((3546, 3561), 'numpy.flip', 'np.flip', (['thrFil'], {}), '(thrFil)\n', (3553, 3561), True, 'import numpy as np\n'), ((5328, 5346), 'GeometryLibrary.angle_radians', 'angle_radians', (['ang'], {}), '(ang)\n', (5341, 5346), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((5359, 5377), 'GeometryLibrary.angle_radians', 'angle_radians', (['ang'], {}), '(ang)\n', (5372, 5377), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((6276, 6308), 'GeometryLibrary.distance', 'distance', (['micList[k]', 'source_pos'], {}), '(micList[k], source_pos)\n', (6284, 6308), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((6805, 6892), 'GeometryLibrary.estimateK_Pair', 'estimateK_Pair', (['signalsPower[i]', 'signalsPower[j]', 'micList[i]', 'micList[j]', '(a * 343.2)'], {}), '(signalsPower[i], signalsPower[j], micList[i], micList[j], a *\n 343.2)\n', (6819, 6892), False, 'from GeometryLibrary import getPoint, estimateK_Pair, distance, getAngle_Pair, getAngle_angle1, angle_degree, KarstenDOA_calculateSteep_linear_simple\n'), ((6934, 7021), 'GeometryLibrary.estimateK_Pair', 'estimateK_Pair', (['signalsPower[i]', 'signalsPower[j]', 'micList[i]', 'micList[j]', '(b * 343.2)'], {}), '(signalsPower[i], signalsPower[j], micList[i], micList[j], b *\n 343.2)\n', (6948, 7021), False, 'from GeometryLibrary import getPoint, estimateK_Pair, distance, getAngle_Pair, getAngle_angle1, angle_degree, KarstenDOA_calculateSteep_linear_simple\n'), ((5097, 5129), 'GeometryLibrary.angle_radians', 'angle_radians', (['(360 / 8 * (2 - i))'], {}), '(360 / 8 * (2 - i))\n', (5110, 5129), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n'), ((5141, 5173), 'GeometryLibrary.angle_radians', 'angle_radians', (['(360 / 8 * (2 - i))'], {}), '(360 / 8 * (2 - i))\n', (5154, 5173), False, 'from GeometryLibrary import getAngle_angle1, angle_radians, getPoint, distance\n')]
|
# %%
import os
from torch.utils.data import DataLoader
from dataloaders.csv_data_loader import CSVDataLoader
from dotenv import load_dotenv
import numpy as np
from torchvision import transforms
load_dotenv()
DATA_FOLDER_PATH = os.getenv("DATA_FOLDER_PATH")
def get_normalization_mean_std(dataset: str = None, datasheet : str = None):
if datasheet:
MASTER_PATH = datasheet
if dataset:
if dataset == 'leaf':
DATA_PATH = "leaves_segmented_master.csv"
elif dataset == 'plant':
DATA_PATH = "plant_data_split_master.csv"
elif dataset == 'plant_golden':
DATA_PATH = "plant_data_split_golden.csv"
else:
raise ValueError(f"Dataset {dataset} not defined. Accepted values: plant, plant_golden, leaf")
MASTER_PATH = os.path.join(DATA_FOLDER_PATH, DATA_PATH)
transform = transforms.Compose([
transforms.ToPILImage(),
transforms.Resize(224),
transforms.ToTensor()
])
master_dataset = CSVDataLoader(
csv_file=MASTER_PATH,
root_dir=DATA_FOLDER_PATH,
image_path_col="Split masked image path",
label_col="Label",
transform=transform
)
BATCH_SIZE = 1
master_dataloader = DataLoader(master_dataset, batch_size=BATCH_SIZE, shuffle=False, num_workers=4)
image_mean = []
image_std = []
for i, data in enumerate(master_dataloader):
# shape (batch_size, 3, height, width)
numpy_image = data['image'].numpy()
# shape (3,)
batch_mean = np.mean(numpy_image, axis=(0, 2, 3))
batch_std0 = np.std(numpy_image, axis=(0, 2, 3))
image_mean.append(batch_mean)
image_std.append(batch_std0)
image_mean = np.array(image_mean).mean(axis=0)
image_std = np.array(image_std).mean(axis=0)
# print(f"Image mean: {image_mean}")
# print(f"Image std: {image_std}")
return image_mean, image_std
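# Usage sketch (assumes the 'plant' CSV and its images exist under DATA_FOLDER_PATH):
# the per-channel statistics returned above can feed a torchvision Normalize transform.
if __name__ == "__main__":
    mean, std = get_normalization_mean_std(dataset="plant")
    print(transforms.Normalize(mean=mean.tolist(), std=std.tolist()))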
|
[
"torch.utils.data.DataLoader",
"numpy.std",
"torchvision.transforms.ToPILImage",
"torchvision.transforms.ToTensor",
"dotenv.load_dotenv",
"numpy.mean",
"numpy.array",
"dataloaders.csv_data_loader.CSVDataLoader",
"os.path.join",
"os.getenv",
"torchvision.transforms.Resize"
] |
[((243, 256), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (254, 256), False, 'from dotenv import load_dotenv\n'), ((277, 306), 'os.getenv', 'os.getenv', (['"""DATA_FOLDER_PATH"""'], {}), "('DATA_FOLDER_PATH')\n", (286, 306), False, 'import os\n'), ((1066, 1219), 'dataloaders.csv_data_loader.CSVDataLoader', 'CSVDataLoader', ([], {'csv_file': 'MASTER_PATH', 'root_dir': 'DATA_FOLDER_PATH', 'image_path_col': '"""Split masked image path"""', 'label_col': '"""Label"""', 'transform': 'transform'}), "(csv_file=MASTER_PATH, root_dir=DATA_FOLDER_PATH,\n image_path_col='Split masked image path', label_col='Label', transform=\n transform)\n", (1079, 1219), False, 'from dataloaders.csv_data_loader import CSVDataLoader\n'), ((1302, 1381), 'torch.utils.data.DataLoader', 'DataLoader', (['master_dataset'], {'batch_size': 'BATCH_SIZE', 'shuffle': '(False)', 'num_workers': '(4)'}), '(master_dataset, batch_size=BATCH_SIZE, shuffle=False, num_workers=4)\n', (1312, 1381), False, 'from torch.utils.data import DataLoader\n'), ((862, 903), 'os.path.join', 'os.path.join', (['DATA_FOLDER_PATH', 'DATA_PATH'], {}), '(DATA_FOLDER_PATH, DATA_PATH)\n', (874, 903), False, 'import os\n'), ((1607, 1643), 'numpy.mean', 'np.mean', (['numpy_image'], {'axis': '(0, 2, 3)'}), '(numpy_image, axis=(0, 2, 3))\n', (1614, 1643), True, 'import numpy as np\n'), ((1665, 1700), 'numpy.std', 'np.std', (['numpy_image'], {'axis': '(0, 2, 3)'}), '(numpy_image, axis=(0, 2, 3))\n', (1671, 1700), True, 'import numpy as np\n'), ((950, 973), 'torchvision.transforms.ToPILImage', 'transforms.ToPILImage', ([], {}), '()\n', (971, 973), False, 'from torchvision import transforms\n'), ((983, 1005), 'torchvision.transforms.Resize', 'transforms.Resize', (['(224)'], {}), '(224)\n', (1000, 1005), False, 'from torchvision import transforms\n'), ((1015, 1036), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1034, 1036), False, 'from torchvision import transforms\n'), ((1796, 1816), 'numpy.array', 'np.array', (['image_mean'], {}), '(image_mean)\n', (1804, 1816), True, 'import numpy as np\n'), ((1846, 1865), 'numpy.array', 'np.array', (['image_std'], {}), '(image_std)\n', (1854, 1865), True, 'import numpy as np\n')]
|
# ===========================================================================
# heightmap.py ------------------------------------------------------------
# ===========================================================================
# import ------------------------------------------------------------------
# ---------------------------------------------------------------------------
from shdw.__init__ import _logger
import shdw.utils.ply
import shdw.config.settings
import shdw.utils.imgtools
import tempfile
import subprocess
import pandas
import pathlib
import cv2
import numpy as np
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_height_map(img, height=dict(), show=False):
dim_new = (img.shape[0]*img.shape[1])
img = shdw.utils.imgtools.expand_image_dim(img.astype(float))
if show:
height_factor = float(np.max(img))/(float(np.max([img.shape[0], img.shape[1]])) / 10)
img = img/height_factor
grid = np.indices((img.shape[0], img.shape[1]), dtype="float")
height.update(
{
'x': grid[0,...].reshape(dim_new).T,
'y': grid[1,...].reshape(dim_new).T,
'z': img[...,0].reshape(dim_new).T
}
)
return height
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def colorize_height_map(img, height=dict()):
img_width, img_height, _ = img.shape
dim_new =(img_width*img_height)
height.update(
{
'red': img[:,:,0].reshape((img.shape[0]*img.shape[1])).T,
'green': img[:,:,1].reshape((img.shape[0]*img.shape[1])).T,
'blue': img[:,:,2].reshape((img.shape[0]*img.shape[1])).T
}
)
return height
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def add_intensity_to_height_map(img, height=dict()):
try:
img_width, img_height, _ = img.shape
dim_new =(img_width*img_height)
height.update(
{
'intensity': img[:,:,1].reshape((img.shape[0]*img.shape[1])).T
}
)
except AttributeError:
pass
return height
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_colorized_height_map(img, height, label=None, show=False):
data = get_height_map(height, show=show)
data = colorize_height_map(img, data)
try:
data = add_intensity_to_height_map(label, data)
except ValueError:
pass
return data
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def write_height_map(img, height, path):
data = pandas.DataFrame(height, index=range(img.shape[0]*img.shape[1]))
# write to temporary file
_logger.info("[SAVE] '{}'".format(path))
shdw.utils.ply.write_ply(path, points=data)
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def read_height_map(path):
return shdw.utils.ply.read_ply(path)["points"]
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_normal_image(img, height, bins=None, verbose=False, show=False):
data = get_colorized_height_map(img, height, show=show)
# create a temporary file
path = tempfile.mkstemp(prefix="shdw-", suffix=".ply")[1]
write_height_map(img, data, path)
compute_normals(path, verbose=verbose)
normals = read_height_map(path)["nz"].to_numpy().reshape(height.shape)*(-1.)+1.
normals = np.where(normals>0., normals, np.min(normals[normals>0.]))
normals = shdw.utils.imgtools.project_data_to_img(-np.log(normals))
if bins:
normals = np.ceil(normals*bins)
normals = (np.where(normals==0., 1., normals) - 1.)/(bins-1.)
return normals
# plt.hist(hm.reshape(-1,1),bins="auto", histtype="step")
# print(np.unique(hm))
# plt.show()
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def compute_normals(path, verbose=False):
args = shdw.config.settings._SETTINGS["cloud_normals_args"]
cloud_process = "cloud_process" if not verbose else "cloud_process_verbose"
process = subprocess.Popen(
get_args(
shdw.config.settings._SETTINGS[cloud_process],
[item.format(**{"obj": { "path": path}}) for item in args]
)
)
process.wait()
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def compute_mesh(path, verbose=False):
args = shdw.config.settings._SETTINGS["cloud_delaunay_args"]
cloud_process = "cloud_process" if not verbose else "cloud_process_verbose"
process = subprocess.Popen(
get_args(
shdw.config.settings._SETTINGS[cloud_process],
[item.format(**{"obj": { "path": path}}) for item in args]
)
)
process.wait()
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def open_height_map(path, ccviewer=False):
if ccviewer:
subprocess.Popen(
get_args( shdw.config.settings._SETTINGS["cloud_viewer"], path)
)
else:
subprocess.Popen(
get_args(shdw.config.settings._SETTINGS["cloud_editor"],path)
)
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def main(img, height, label=None, verbose=False, normals=False, mesh=False, ccviewer=True, attempt=False, show=False):
data = get_colorized_height_map(img, height, label=label, show=show)
# create a temporary file
path = tempfile.mkstemp(prefix="shdw-", suffix=".ply")[1]
write_height_map(img, data, path)
if normals:
compute_normals(path, verbose=verbose)
if mesh:
compute_mesh(path, verbose=verbose)
open_height_map(path, ccviewer=ccviewer)
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_args(cmd, *args):
cmd = cmd.copy() if isinstance(cmd, list) else [cmd]
for a in args:
cmd.extend(*to_list(a))
return " ".join(cmd)
# function ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def to_list(*args):
return (x if isinstance(x, list) or x is None else [x] for x in args)
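# function   ----------------------------------------------------------------
# ---------------------------------------------------------------------------
def get_args_example():
    # Illustrative sketch (the command and file names are hypothetical): get_args
    # joins a base command list and extra arguments into the single shell string
    # handed to subprocess above.
    cmd = get_args(["cloud_process", "-SILENT"], "-O", "scene.ply")
    assert cmd == "cloud_process -SILENT -O scene.ply"
    return cmd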
|
[
"numpy.log",
"numpy.ceil",
"tempfile.mkstemp",
"numpy.indices",
"numpy.min",
"numpy.max",
"numpy.where"
] |
[((1066, 1121), 'numpy.indices', 'np.indices', (['(img.shape[0], img.shape[1])'], {'dtype': '"""float"""'}), "((img.shape[0], img.shape[1]), dtype='float')\n", (1076, 1121), True, 'import numpy as np\n'), ((3786, 3833), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': '"""shdw-"""', 'suffix': '""".ply"""'}), "(prefix='shdw-', suffix='.ply')\n", (3802, 3833), False, 'import tempfile\n'), ((4049, 4079), 'numpy.min', 'np.min', (['normals[normals > 0.0]'], {}), '(normals[normals > 0.0])\n', (4055, 4079), True, 'import numpy as np\n'), ((4182, 4205), 'numpy.ceil', 'np.ceil', (['(normals * bins)'], {}), '(normals * bins)\n', (4189, 4205), True, 'import numpy as np\n'), ((6375, 6422), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': '"""shdw-"""', 'suffix': '""".ply"""'}), "(prefix='shdw-', suffix='.ply')\n", (6391, 6422), False, 'import tempfile\n'), ((4133, 4148), 'numpy.log', 'np.log', (['normals'], {}), '(normals)\n', (4139, 4148), True, 'import numpy as np\n'), ((958, 969), 'numpy.max', 'np.max', (['img'], {}), '(img)\n', (964, 969), True, 'import numpy as np\n'), ((4223, 4261), 'numpy.where', 'np.where', (['(normals == 0.0)', '(1.0)', 'normals'], {}), '(normals == 0.0, 1.0, normals)\n', (4231, 4261), True, 'import numpy as np\n'), ((978, 1014), 'numpy.max', 'np.max', (['[img.shape[0], img.shape[1]]'], {}), '([img.shape[0], img.shape[1]])\n', (984, 1014), True, 'import numpy as np\n')]
|
import pygame
import cv2
from matplotlib import pyplot as plt
import numpy as np
def get_bkgr():
image = cv2.imread("frame1.jpg")
mask = cv2.imread("mask.jpg")
grayImage = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
(thresh, blackAndWhiteImage) = cv2.threshold(grayImage, 127, 255, cv2.THRESH_BINARY)
bkgr = cv2.bitwise_and(image, image, mask=blackAndWhiteImage)
cv2.imwrite("bgr.jpg", bkgr)
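def mask_demo():
    # Illustrative sketch with synthetic data (not part of the original flow):
    # pixels under the white disc in the mask are kept and the rest is zeroed,
    # mirroring the threshold + bitwise_and combination used in get_bkgr().
    image = np.full((100, 100, 3), 200, dtype=np.uint8)
    mask = np.zeros((100, 100), dtype=np.uint8)
    cv2.circle(mask, (50, 50), 20, 255, -1)
    _, bw = cv2.threshold(mask, 127, 255, cv2.THRESH_BINARY)
    return cv2.bitwise_and(image, image, mask=bw)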
def video_play():
pygame.display.init()
pygame.display.update()
print("=================================")
video = cv2.VideoCapture("./video/vid_use.mp4")
success, video_image = video.read()
print(video_image)
cv2.imwrite("frame1.jpg", video_image)
frame1 = cv2.imread("frame1.jpg")
h, w, l = video_image.shape
img_mask = np.zeros((h,w), dtype=np.uint8)
temp_mask = np.zeros((h,w), dtype=np.uint8)
fps = video.get(cv2.CAP_PROP_FPS)
RED = (255, 0, 0)
window = pygame.display.set_mode(video_image.shape[1::-1])
clock = pygame.time.Clock()
run = success
brush = None
paused = False
brush_size = 50
print("=================================")
print("Press left mouse button and hold to color the background and let go when done")
print("Press the PLUS or MINUS key to increase or decrease the size of the brush (respectively)")
print("=================================")
while run:
clock.tick(fps)
for event in pygame.event.get():
if event.type == pygame.QUIT:
run = False
elif event.type == pygame.MOUSEBUTTONDOWN:
# print("Mouse buton Down")
paused = True
if event.button == 1: # left button pressed
brush = event.pos
elif event.type == pygame.MOUSEBUTTONUP:
# print("Mouse buton Up")
# cv2.add(img_mask, temp_mask)
paused = False
temp_mask = np.zeros((h,w), dtype=np.uint8)
if event.button == 1: # left button released
brush = None
elif event.type == pygame.MOUSEMOTION:
if brush: # left button still pressed
paused = True
brush = event.pos
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_KP_MINUS:
print("Brush Size: ", brush_size)
brush_size = brush_size - 10
if event.key == pygame.K_KP_PLUS:
print("Brush Size: ", brush_size)
brush_size = brush_size + 10
if event.key == pygame.K_SPACE:
if paused == False:
paused = True
else:
paused = False
# draw brush in bufor
if brush:
pygame.draw.circle(window, RED, (brush[0], brush[1]), brush_size)
# cv2.circle(video_image, (brush[0], brush[1]), 10, (255, 0, 0, 0), -1)
cv2.circle(temp_mask, (brush[0], brush[1]), brush_size, (255, 255, 255), -1)
# temp_mask[brush[0], brush[1]] = 255
temp_mask = cv2.add(temp_mask, temp_mask)
img_mask = cv2.add(img_mask, temp_mask)
# plt.imshow(temp_mask)
# plt.show()
# send bufor on the screen
pygame.display.flip()
if not paused:
success, video_image = video.read()
video_surf = None
if success:
h, w, l = video_image.shape
video_surf = pygame.image.frombuffer(
video_image.tobytes(), video_image.shape[1::-1], "BGR")
else:
break
window.blit(video_surf, (0, 0))
pygame.display.flip()
pygame.quit()
cv2.imwrite("mask.jpg", img_mask)
get_bkgr()
plt.figure()
plt.imshow(img_mask)
plt.show()
video_play()
|
[
"cv2.bitwise_and",
"pygame.event.get",
"pygame.display.update",
"matplotlib.pyplot.figure",
"cv2.cvtColor",
"cv2.imwrite",
"pygame.display.set_mode",
"matplotlib.pyplot.imshow",
"pygame.quit",
"cv2.circle",
"matplotlib.pyplot.show",
"pygame.time.Clock",
"cv2.add",
"pygame.display.init",
"pygame.draw.circle",
"cv2.threshold",
"numpy.zeros",
"pygame.display.flip",
"cv2.VideoCapture",
"cv2.imread"
] |
[((132, 156), 'cv2.imread', 'cv2.imread', (['"""frame1.jpg"""'], {}), "('frame1.jpg')\n", (142, 156), False, 'import cv2\n'), ((168, 190), 'cv2.imread', 'cv2.imread', (['"""mask.jpg"""'], {}), "('mask.jpg')\n", (178, 190), False, 'import cv2\n'), ((207, 245), 'cv2.cvtColor', 'cv2.cvtColor', (['mask', 'cv2.COLOR_BGR2GRAY'], {}), '(mask, cv2.COLOR_BGR2GRAY)\n', (219, 245), False, 'import cv2\n'), ((281, 334), 'cv2.threshold', 'cv2.threshold', (['grayImage', '(127)', '(255)', 'cv2.THRESH_BINARY'], {}), '(grayImage, 127, 255, cv2.THRESH_BINARY)\n', (294, 334), False, 'import cv2\n'), ((346, 400), 'cv2.bitwise_and', 'cv2.bitwise_and', (['image', 'image'], {'mask': 'blackAndWhiteImage'}), '(image, image, mask=blackAndWhiteImage)\n', (361, 400), False, 'import cv2\n'), ((407, 435), 'cv2.imwrite', 'cv2.imwrite', (['"""bgr.jpg"""', 'bkgr'], {}), "('bgr.jpg', bkgr)\n", (418, 435), False, 'import cv2\n'), ((460, 481), 'pygame.display.init', 'pygame.display.init', ([], {}), '()\n', (479, 481), False, 'import pygame\n'), ((486, 509), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (507, 509), False, 'import pygame\n'), ((569, 608), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""./video/vid_use.mp4"""'], {}), "('./video/vid_use.mp4')\n", (585, 608), False, 'import cv2\n'), ((676, 714), 'cv2.imwrite', 'cv2.imwrite', (['"""frame1.jpg"""', 'video_image'], {}), "('frame1.jpg', video_image)\n", (687, 714), False, 'import cv2\n'), ((728, 752), 'cv2.imread', 'cv2.imread', (['"""frame1.jpg"""'], {}), "('frame1.jpg')\n", (738, 752), False, 'import cv2\n'), ((800, 832), 'numpy.zeros', 'np.zeros', (['(h, w)'], {'dtype': 'np.uint8'}), '((h, w), dtype=np.uint8)\n', (808, 832), True, 'import numpy as np\n'), ((848, 880), 'numpy.zeros', 'np.zeros', (['(h, w)'], {'dtype': 'np.uint8'}), '((h, w), dtype=np.uint8)\n', (856, 880), True, 'import numpy as np\n'), ((953, 1002), 'pygame.display.set_mode', 'pygame.display.set_mode', (['video_image.shape[1::-1]'], {}), '(video_image.shape[1::-1])\n', (976, 1002), False, 'import pygame\n'), ((1015, 1034), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (1032, 1034), False, 'import pygame\n'), ((3839, 3852), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3850, 3852), False, 'import pygame\n'), ((3859, 3892), 'cv2.imwrite', 'cv2.imwrite', (['"""mask.jpg"""', 'img_mask'], {}), "('mask.jpg', img_mask)\n", (3870, 3892), False, 'import cv2\n'), ((3912, 3924), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3922, 3924), True, 'from matplotlib import pyplot as plt\n'), ((3929, 3949), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img_mask'], {}), '(img_mask)\n', (3939, 3949), True, 'from matplotlib import pyplot as plt\n'), ((3954, 3964), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3962, 3964), True, 'from matplotlib import pyplot as plt\n'), ((1462, 1480), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1478, 1480), False, 'import pygame\n'), ((3390, 3411), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3409, 3411), False, 'import pygame\n'), ((2891, 2956), 'pygame.draw.circle', 'pygame.draw.circle', (['window', 'RED', '(brush[0], brush[1])', 'brush_size'], {}), '(window, RED, (brush[0], brush[1]), brush_size)\n', (2909, 2956), False, 'import pygame\n'), ((3053, 3129), 'cv2.circle', 'cv2.circle', (['temp_mask', '(brush[0], brush[1])', 'brush_size', '(255, 255, 255)', '(-1)'], {}), '(temp_mask, (brush[0], brush[1]), brush_size, (255, 255, 255), -1)\n', (3063, 3129), False, 'import cv2\n'), ((3204, 
3233), 'cv2.add', 'cv2.add', (['temp_mask', 'temp_mask'], {}), '(temp_mask, temp_mask)\n', (3211, 3233), False, 'import cv2\n'), ((3257, 3285), 'cv2.add', 'cv2.add', (['img_mask', 'temp_mask'], {}), '(img_mask, temp_mask)\n', (3264, 3285), False, 'import cv2\n'), ((3811, 3832), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3830, 3832), False, 'import pygame\n'), ((1981, 2013), 'numpy.zeros', 'np.zeros', (['(h, w)'], {'dtype': 'np.uint8'}), '((h, w), dtype=np.uint8)\n', (1989, 2013), True, 'import numpy as np\n')]
|
import pandas as pd
import numpy as np
from sklearn.datasets import dump_svmlight_file
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from skmultilearn.model_selection import iterative_train_test_split
import json
import sklearn
import pickle
DATASET_PATH = "dataset/dataset_sample.csv"
CVE_LABEL_DATASET_PATH = "dataset/cve_labels_sample.csv"
# Following the usual convention, we split the dataset 0.75:0.25 between training and test data.
# The split results are saved in the dataset/splitted folder as 4 different files:
# splitted_train_x = training data (description/reference/etc.)
# splitted_train_y = labels for the training data
# splitted_test_x = test data
# splitted_test_y = labels for the test data
def split_dataset(INPUT_DATASET):
description_fields = ["cve_id", "merged"]
# Initiate the dataframe containing the CVE ID and its description
# Change the "merged" field in the description_fields variable to use other text feature such as reference
df = pd.read_csv(DATASET_PATH, usecols=description_fields)
# Read column names from file
cols = list(pd.read_csv(DATASET_PATH, nrows=1))
# Initiate the dataframe containing the labels for each CVE
pd_labels = pd.read_csv(DATASET_PATH,
usecols=[i for i in cols if i not in ["cve_id", "cleaned", "matchers", "merged"]])
    # Initiate a list which contains the labels considered in the dataset
list_labels = [i for i in cols if i not in ["cve_id", "cleaned", "matchers", "merged"]]
# Convert to numpy for splitting
data = df.to_numpy()
labels = pd_labels.to_numpy()
# Splitting using skmultilearn iterative train test split
train, label_train, test, label_test = iterative_train_test_split(data, labels, test_size=0.25)
# Save the splitted data to files
np.save("dataset/splitted/splitted_train_x.npy", train, allow_pickle=True)
np.save("dataset/splitted/splitted_train_y.npy", label_train, allow_pickle=True)
np.save("dataset/splitted/splitted_test_x.npy", test, allow_pickle=True)
np.save("dataset/splitted/splitted_test_y.npy", label_test, allow_pickle=True)
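# Convenience sketch (not called anywhere in this module): reload the four arrays
# written by split_dataset in one call, using the same dataset/splitted layout.
def load_splits(folder="dataset/splitted"):
    return (np.load(folder + "/splitted_train_x.npy", allow_pickle=True),
            np.load(folder + "/splitted_train_y.npy", allow_pickle=True),
            np.load(folder + "/splitted_test_x.npy", allow_pickle=True),
            np.load(folder + "/splitted_test_y.npy", allow_pickle=True))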
# The extremeText dataset should be separated into training data and test data (validation data optional).
# prepare_extreme_text_dataset writes the cleaned dataset as train and test files in
# the format required by extremeText, which is:
# __label__LIBRARYNAME1 __label__LIBRARYNAME2 ... CVE_DESCRIPTION
# the label prefix can be changed via the label_prefix parameter;
# we use the default __label__ following the fasttext tutorial
# This function assumes that the split dataset is available in the dataset/splitted folder
def prepare_extreme_text_dataset(TRAINING_OUTPUT, TEST_OUTPUT, label_prefix = "__label__"):
# Read column names from file
cols = list(pd.read_csv(DATASET_PATH, nrows=1))
# Initiate the dataframe containing the labels for each CVE
pd_labels = pd.read_csv(DATASET_PATH,
usecols=[i for i in cols if i not in ["cve_id", "description_text", "cpe_text", "merged"]])
    # Initiate a list which contains the labels considered in the dataset
list_labels = [i for i in cols if i not in ["cve_id", "description_text", "cpe_text", "merged"]]
train_x = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
train_y = np.load("dataset/splitted/splitted_train_y.npy", allow_pickle=True)
test_x = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
test_y = np.load("dataset/splitted/splitted_test_y.npy", allow_pickle=True)
# process the training data to follow the extremetext requirements
train_data = []
# loop through all the training data
for i in range(len(train_x)):
text = train_x[i, 1]
# get the index for the labels
index_labels = np.nonzero(train_y[i])[0]
labels = ""
# loop through the label indexes, get the string representation and append to the string
for idx in index_labels:
labels = labels + label_prefix + list_labels[idx] + " "
train_data.append(labels + text.lstrip())
# write the train data to file
with open(TRAINING_OUTPUT, "w", encoding="utf-8") as w:
for line in train_data:
w.write(line + "\n")
w.close()
# Do the same for the test data
# process the test data to follow the extremetext requirements
test_data = []
# loop through all the training data
for i in range(len(test_x)):
text = test_x[i, 1]
# get the index for the labels
index_labels = np.nonzero(test_y[i])[0]
labels = ""
# loop through the label indexes, get the string representation and append to the string
for idx in index_labels:
labels = labels + label_prefix + list_labels[idx] + " "
test_data.append(labels + text.lstrip())
# write the train data to file
with open(TEST_OUTPUT, "w", encoding="utf-8") as w:
for line in test_data:
w.write(line + "\n")
w.close()
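# Sketch of a single extremeText/fastText training line as produced above
# (the labels would be real library names from the dataset): space-separated
# "__label__<name>" tokens followed by the CVE text.
def extremetext_line(labels, text, label_prefix="__label__"):
    return "".join(label_prefix + label + " " for label in labels) + text.lstrip()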
# the dataset expected by omikuji is the split dataset extracted into BoW/TF-IDF features
# while it is similar to the dataset required by LightXML, which can be produced through sklearn's dump_svmlight_file
# and the TfidfVectorizer, the omikuji dataset requires a header line in the svmlight file
# the header consists of the space-separated elements: <number of examples> <number of features> <number of labels>
# This function assumes that the split dataset is available in the dataset/splitted folder
def prepare_omikuji_dataset():
# Load the splitted dataset files
train = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
label_train = np.load("dataset/splitted/splitted_train_y.npy", allow_pickle=True)
test = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
label_test = np.load("dataset/splitted/splitted_test_y.npy", allow_pickle=True)
train_corpus = train[:, 1].tolist()
test_corpus = test[:, 1].tolist()
cols = list(pd.read_csv(DATASET_PATH, nrows=1))
label_columns = [i for i in cols if i not in ["cve_id", "cleaned", "matchers", "merged"]]
num_labels = len(label_columns)
vectorizer = TfidfVectorizer().fit(train_corpus)
idx_zero_train = np.argwhere(np.all(label_train[..., :] == 0, axis=0))
idx_zero_test = np.argwhere(np.all(label_test[..., :] == 0, axis=0))
train_X = vectorizer.transform(train_corpus)
# train_Y = np.delete(label_train, idx_zero_train, axis=1)
train_Y = label_train
test_X = vectorizer.transform(test_corpus)
# test_Y = np.delete(label_test, idx_zero_test, axis=1)
test_Y = label_test
num_features = len(vectorizer.get_feature_names())
num_row_train = train_X.shape[0]
num_row_test = test_X.shape[0]
train_file_header = num_row_train.__str__() + " " + num_features.__str__() + " " + (num_labels).__str__()
test_file_header = num_row_test.__str__() + " " + num_features.__str__() + " " + (num_labels).__str__()
# Dump the standard svmlight file
dump_svmlight_file(train_X, train_Y, "dataset/omikuji/train.txt", multilabel=True)
dump_svmlight_file(test_X, test_Y, "dataset/omikuji/test.txt", multilabel=True)
# Prepend the header to the svmlight file
with open("dataset/omikuji/train.txt", 'r+') as f:
content = f.read()
f.seek(0, 0)
f.write(train_file_header.rstrip('\r\n') + '\n' + content)
f.close()
with open("dataset/omikuji/test.txt", 'r+') as f:
content = f.read()
f.seek(0, 0)
f.write(test_file_header.rstrip('\r\n') + '\n' + content)
f.close()
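# Sketch of the extra header line omikuji expects on top of the svmlight file,
# as described above: "<number of examples> <number of features> <number of labels>".
def omikuji_header(num_rows, num_features, num_labels):
    return f"{num_rows} {num_features} {num_labels}"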
def prepare_fastxml_dataset():
# use the splitted dataset to create the train and test json required for the fastxml algorithm
train = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
test = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
df_labels = pd.read_csv(DATASET_PATH, usecols=["cve_id", "labels"])
with open("dataset/fastxml/train.json", "w") as f:
for data in train:
json_rep = {}
json_rep["title"] = data[1].lstrip().rstrip()
cve_id = data[0]
cve_labels = df_labels[df_labels["cve_id"] == cve_id]["labels"].values.__str__()
# Cleanup the label string from the cve_labels variable
cve_labels = cve_labels.replace("[", "")
cve_labels = cve_labels.replace("]", "")
cve_labels = cve_labels.replace("'", "")
cve_labels = cve_labels.replace('"', "")
cve_labels = cve_labels.replace(" ", "")
cve_labels = cve_labels.split(",")
json_rep["tags"] = cve_labels
json.dump(json_rep, f, ensure_ascii=False)
f.write("\n")
# with open("dataset/test_non_iterative.json", "w") as f:
with open("dataset/fastxml/test.json", "w") as f:
for data in test:
json_rep = {}
json_rep["title"] = data[1].lstrip().rstrip()
cve_id = data[0]
cve_labels = df_labels[df_labels["cve_id"] == cve_id]["labels"].values.__str__()
# Cleanup the label string from the cve_labels variable
cve_labels = cve_labels.replace("[", "")
cve_labels = cve_labels.replace("]", "")
cve_labels = cve_labels.replace("'", "")
cve_labels = cve_labels.replace('"', "")
cve_labels = cve_labels.replace(" ", "")
cve_labels = cve_labels.split(",")
json_rep["tags"] = cve_labels
json.dump(json_rep, f, ensure_ascii=False)
f.write("\n")
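# Helper sketch (not wired into the function above): the label-cell cleanup that is
# duplicated in the two loops, i.e. stripping the numpy string wrapper and splitting on commas.
def parse_label_cell(raw):
    for char in ("[", "]", "'", '"', " "):
        raw = raw.replace(char, "")
    return raw.split(",")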
# The XML CNN dataset is in the form of tab-separated files (tsv) with three columns
# first column is the ID, an integer/String representing the ID of the entry
# second column is the labels
# third column is the text
# moreover, the XML CNN also requires validation data, which by default is taken from the training data (25% of the train data)
def prepare_xmlcnn_dataset():
# load the splitted test train data
train_unsplitted_x = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
train_unsplitted_y = np.load("dataset/splitted/splitted_train_y.npy", allow_pickle=True)
test_x = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
test_y = np.load("dataset/splitted/splitted_test_y.npy", allow_pickle=True)
# then re-split the train data to make the validation data
train_x, train_y, valid_x, valid_y = iterative_train_test_split(train_unsplitted_x, train_unsplitted_y, test_size=0.25)
# start with the train data
with open("dataset/XML-CNN/train.txt", "w", encoding="utf-8") as w:
for i in range(len(train_x)):
id = train_x[i][0]
text = train_x[i][1]
labels = np.nonzero(train_y[i])[0]
# initiate the entry with the id
entry = id + "\t"
# then the labels
for label in labels:
entry = entry + label.__str__() + " "
# then the text, remove the last space from the label entry
entry = entry[:-1] + "\t" + text
# finally, write the entry to the file
w.write(entry + "\n")
# then the validation data
with open("dataset/XML-CNN/valid.txt", "w", encoding="utf-8") as w:
for i in range(len(valid_x)):
id = valid_x[i][0]
text = valid_x[i][1]
labels = np.nonzero(valid_y[i])[0]
# initiate the entry with the id
entry = id + "\t"
# then the labels
for label in labels:
entry = entry + label.__str__() + " "
# then the text, remove the last space from the label entry
entry = entry[:-1] + "\t" + text
# finally, write the entry to the file
w.write(entry + "\n")
# finally, the test data
with open("dataset/XML-CNN/test.txt", "w", encoding="utf-8") as w:
for i in range(len(test_x)):
id = test_x[i][0]
text = test_x[i][1]
labels = np.nonzero(test_y[i])[0]
# initiate the entry with the id
entry = id + "\t"
# then the labels
for label in labels:
entry = entry + label.__str__() + " "
# then the text, remove the last space from the label entry
entry = entry[:-1] + "\t" + text
# finally, write the entry to the file
w.write(entry + "\n")
print("XML CNN dataset created successfully!!!")
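# Sketch of a single XML-CNN row in the tab-separated layout described above:
# "<id>\t<space-separated label indices>\t<text>" (the example values are made up).
def xmlcnn_line(entry_id, label_indices, text):
    return entry_id + "\t" + " ".join(str(idx) for idx in label_indices) + "\t" + text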
# the test and train data are the same as for omikuji
# however, you also need to create train/test_labels.txt and train/test_texts.txt
# with each row containing the text and labels for the train/test data
def prepare_lightxml_dataset():
# # Load the splitted dataset files
train = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
label_train = np.load("dataset/splitted/splitted_train_y.npy", allow_pickle=True)
test = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
label_test = np.load("dataset/splitted/splitted_test_y.npy", allow_pickle=True)
train_corpus = train[:, 1].tolist()
test_corpus = test[:, 1].tolist()
cols = list(pd.read_csv(DATASET_PATH, nrows=1))
label_columns = [i for i in cols if i not in ["cve_id", "cleaned", "matchers", "merged"]]
num_labels = len(label_columns)
vectorizer = TfidfVectorizer().fit(train_corpus)
idx_zero_train = np.argwhere(np.all(label_train[..., :] == 0, axis=0))
idx_zero_test = np.argwhere(np.all(label_test[..., :] == 0, axis=0))
train_X = vectorizer.transform(train_corpus)
train_Y = label_train
test_X = vectorizer.transform(test_corpus)
test_Y = label_test
num_features = len(vectorizer.get_feature_names())
num_row_train = train_X.shape[0]
num_row_test = test_X.shape[0]
# Dump the standard svmlight file
dump_svmlight_file(train_X, train_Y, "dataset/lightxml/train.txt", multilabel=True)
dump_svmlight_file(test_X, test_Y, "dataset/lightxml/test.txt", multilabel=True)
train_text = []
train_label = []
test_text = []
test_label = []
cve_labels = pd.read_csv(CVE_LABEL_DATASET_PATH)
train_data = pd.read_csv("dataset/splitted/dataset_train.csv")
# process the label and text here
for index, row in train_data.iterrows():
train_text.append(row.merged.lstrip().rstrip())
# for label below
label = cve_labels[cve_labels["cve_id"] == row.cve_id]
label_unsplit = label.labels.values[0]
label_array = label_unsplit.split(",")
label_string = ""
for label in label_array:
label_string = label_string + label + " "
label_string = label_string.rstrip()
# print(label_string)
train_label.append(label_string)
test_data = pd.read_csv("dataset/splitted/dataset_test.csv")
for index, row in test_data.iterrows():
test_text.append(row.merged.lstrip().rstrip())
# for label below
label = cve_labels[cve_labels["cve_id"] == row.cve_id]
label_unsplit = label.labels.values[0]
label_array = label_unsplit.split(",")
label_string = ""
for label in label_array:
label_string = label_string + label + " "
label_string = label_string.rstrip()
# print(label_string)
test_label.append(label_string)
with open("dataset/lightxml/train_texts.txt", "w", encoding="utf-8") as wr:
for line in train_text:
wr.write(line + "\n")
with open("dataset/lightxml/train_labels.txt", "w", encoding="utf-8") as wr:
for line in train_label:
wr.write(line + "\n")
with open("dataset/lightxml/test_texts.txt", "w", encoding="utf-8") as wr:
for line in test_text:
wr.write(line + "\n")
with open("dataset/lightxml/test_labels.txt", "w", encoding="utf-8") as wr:
for line in test_label:
wr.write(line + "\n")
# the test and train data are in the svmlight format
# unlike omikuji and lightxml, this does not use TF-IDF but a plain bag of words (CountVectorizer)
def prepare_dismec_dataset():
# Load the splitted dataset files
train = np.load("dataset/splitted/splitted_train_x.npy", allow_pickle=True)
label_train = np.load("dataset/splitted/splitted_train_y.npy", allow_pickle=True)
test = np.load("dataset/splitted/splitted_test_x.npy", allow_pickle=True)
label_test = np.load("dataset/splitted/splitted_test_y.npy", allow_pickle=True)
train_corpus = train[:, 1].tolist()
test_corpus = test[:, 1].tolist()
cols = list(pd.read_csv(DATASET_PATH, nrows=1))
label_columns = [i for i in cols if i not in ["cve_id", "cleaned", "matchers", "merged"]]
num_labels = len(label_columns)
vectorizer = CountVectorizer().fit(train_corpus)
idx_zero_train = np.argwhere(np.all(label_train[..., :] == 0, axis=0))
idx_zero_test = np.argwhere(np.all(label_test[..., :] == 0, axis=0))
train_X = vectorizer.transform(train_corpus)
# train_Y = np.delete(label_train, idx_zero_train, axis=1)
train_Y = label_train
test_X = vectorizer.transform(test_corpus)
# test_Y = np.delete(label_test, idx_zero_test, axis=1)
test_Y = label_test
num_features = len(vectorizer.get_feature_names())
num_row_train = train_X.shape[0]
num_row_test = test_X.shape[0]
# Dump the standard svmlight file
dump_svmlight_file(train_X, train_Y, "dataset/dismec/train.txt", multilabel=True)
dump_svmlight_file(test_X, test_Y, "dataset/dismec/test.txt", multilabel=True)
|
[
"sklearn.datasets.dump_svmlight_file",
"json.dump",
"numpy.load",
"numpy.save",
"skmultilearn.model_selection.iterative_train_test_split",
"sklearn.feature_extraction.text.CountVectorizer",
"pandas.read_csv",
"sklearn.feature_extraction.text.TfidfVectorizer",
"numpy.nonzero",
"numpy.all"
] |
[((1046, 1099), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'usecols': 'description_fields'}), '(DATASET_PATH, usecols=description_fields)\n', (1057, 1099), True, 'import pandas as pd\n'), ((1270, 1382), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'usecols': "[i for i in cols if i not in ['cve_id', 'cleaned', 'matchers', 'merged']]"}), "(DATASET_PATH, usecols=[i for i in cols if i not in ['cve_id',\n 'cleaned', 'matchers', 'merged']])\n", (1281, 1382), True, 'import pandas as pd\n'), ((1788, 1844), 'skmultilearn.model_selection.iterative_train_test_split', 'iterative_train_test_split', (['data', 'labels'], {'test_size': '(0.25)'}), '(data, labels, test_size=0.25)\n', (1814, 1844), False, 'from skmultilearn.model_selection import iterative_train_test_split\n'), ((1889, 1963), 'numpy.save', 'np.save', (['"""dataset/splitted/splitted_train_x.npy"""', 'train'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', train, allow_pickle=True)\n", (1896, 1963), True, 'import numpy as np\n'), ((1969, 2054), 'numpy.save', 'np.save', (['"""dataset/splitted/splitted_train_y.npy"""', 'label_train'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', label_train, allow_pickle=True\n )\n", (1976, 2054), True, 'import numpy as np\n'), ((2055, 2127), 'numpy.save', 'np.save', (['"""dataset/splitted/splitted_test_x.npy"""', 'test'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', test, allow_pickle=True)\n", (2062, 2127), True, 'import numpy as np\n'), ((2133, 2211), 'numpy.save', 'np.save', (['"""dataset/splitted/splitted_test_y.npy"""', 'label_test'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', label_test, allow_pickle=True)\n", (2140, 2211), True, 'import numpy as np\n'), ((3038, 3159), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'usecols': "[i for i in cols if i not in ['cve_id', 'description_text', 'cpe_text',\n 'merged']]"}), "(DATASET_PATH, usecols=[i for i in cols if i not in ['cve_id',\n 'description_text', 'cpe_text', 'merged']])\n", (3049, 3159), True, 'import pandas as pd\n'), ((3385, 3452), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (3392, 3452), True, 'import numpy as np\n'), ((3468, 3535), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', allow_pickle=True)\n", (3475, 3535), True, 'import numpy as np\n'), ((3550, 3616), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (3557, 3616), True, 'import numpy as np\n'), ((3631, 3697), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', allow_pickle=True)\n", (3638, 3697), True, 'import numpy as np\n'), ((5817, 5884), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (5824, 5884), True, 'import numpy as np\n'), ((5904, 5971), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', allow_pickle=True)\n", (5911, 5971), True, 'import numpy as np\n'), ((5984, 6050), 'numpy.load', 'np.load', 
(['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (5991, 6050), True, 'import numpy as np\n'), ((6069, 6135), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', allow_pickle=True)\n", (6076, 6135), True, 'import numpy as np\n'), ((7284, 7370), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['train_X', 'train_Y', '"""dataset/omikuji/train.txt"""'], {'multilabel': '(True)'}), "(train_X, train_Y, 'dataset/omikuji/train.txt',\n multilabel=True)\n", (7302, 7370), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((7372, 7451), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['test_X', 'test_Y', '"""dataset/omikuji/test.txt"""'], {'multilabel': '(True)'}), "(test_X, test_Y, 'dataset/omikuji/test.txt', multilabel=True)\n", (7390, 7451), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((8039, 8106), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (8046, 8106), True, 'import numpy as np\n'), ((8119, 8185), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (8126, 8185), True, 'import numpy as np\n'), ((8205, 8260), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'usecols': "['cve_id', 'labels']"}), "(DATASET_PATH, usecols=['cve_id', 'labels'])\n", (8216, 8260), True, 'import pandas as pd\n'), ((10387, 10454), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (10394, 10454), True, 'import numpy as np\n'), ((10481, 10548), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', allow_pickle=True)\n", (10488, 10548), True, 'import numpy as np\n'), ((10563, 10629), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (10570, 10629), True, 'import numpy as np\n'), ((10644, 10710), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', allow_pickle=True)\n", (10651, 10710), True, 'import numpy as np\n'), ((10819, 10905), 'skmultilearn.model_selection.iterative_train_test_split', 'iterative_train_test_split', (['train_unsplitted_x', 'train_unsplitted_y'], {'test_size': '(0.25)'}), '(train_unsplitted_x, train_unsplitted_y,\n test_size=0.25)\n', (10845, 10905), False, 'from skmultilearn.model_selection import iterative_train_test_split\n'), ((13235, 13302), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (13242, 13302), True, 'import numpy as np\n'), ((13322, 13389), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', allow_pickle=True)\n", (13329, 13389), True, 'import numpy as np\n'), ((13402, 13468), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), 
"('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (13409, 13468), True, 'import numpy as np\n'), ((13487, 13553), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', allow_pickle=True)\n", (13494, 13553), True, 'import numpy as np\n'), ((14377, 14464), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['train_X', 'train_Y', '"""dataset/lightxml/train.txt"""'], {'multilabel': '(True)'}), "(train_X, train_Y, 'dataset/lightxml/train.txt',\n multilabel=True)\n", (14395, 14464), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((14466, 14551), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['test_X', 'test_Y', '"""dataset/lightxml/test.txt"""'], {'multilabel': '(True)'}), "(test_X, test_Y, 'dataset/lightxml/test.txt', multilabel=True\n )\n", (14484, 14551), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((14653, 14688), 'pandas.read_csv', 'pd.read_csv', (['CVE_LABEL_DATASET_PATH'], {}), '(CVE_LABEL_DATASET_PATH)\n', (14664, 14688), True, 'import pandas as pd\n'), ((14709, 14758), 'pandas.read_csv', 'pd.read_csv', (['"""dataset/splitted/dataset_train.csv"""'], {}), "('dataset/splitted/dataset_train.csv')\n", (14720, 14758), True, 'import pandas as pd\n'), ((15343, 15391), 'pandas.read_csv', 'pd.read_csv', (['"""dataset/splitted/dataset_test.csv"""'], {}), "('dataset/splitted/dataset_test.csv')\n", (15354, 15391), True, 'import pandas as pd\n'), ((16743, 16810), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_x.npy', allow_pickle=True)\n", (16750, 16810), True, 'import numpy as np\n'), ((16830, 16897), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_train_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_train_y.npy', allow_pickle=True)\n", (16837, 16897), True, 'import numpy as np\n'), ((16910, 16976), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_x.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_x.npy', allow_pickle=True)\n", (16917, 16976), True, 'import numpy as np\n'), ((16995, 17061), 'numpy.load', 'np.load', (['"""dataset/splitted/splitted_test_y.npy"""'], {'allow_pickle': '(True)'}), "('dataset/splitted/splitted_test_y.npy', allow_pickle=True)\n", (17002, 17061), True, 'import numpy as np\n'), ((17990, 18076), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['train_X', 'train_Y', '"""dataset/dismec/train.txt"""'], {'multilabel': '(True)'}), "(train_X, train_Y, 'dataset/dismec/train.txt', multilabel\n =True)\n", (18008, 18076), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((18077, 18155), 'sklearn.datasets.dump_svmlight_file', 'dump_svmlight_file', (['test_X', 'test_Y', '"""dataset/dismec/test.txt"""'], {'multilabel': '(True)'}), "(test_X, test_Y, 'dataset/dismec/test.txt', multilabel=True)\n", (18095, 18155), False, 'from sklearn.datasets import dump_svmlight_file\n'), ((1152, 1186), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'nrows': '(1)'}), '(DATASET_PATH, nrows=1)\n', (1163, 1186), True, 'import pandas as pd\n'), ((2920, 2954), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'nrows': '(1)'}), '(DATASET_PATH, nrows=1)\n', (2931, 2954), True, 'import pandas as pd\n'), ((6233, 6267), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'nrows': '(1)'}), '(DATASET_PATH, nrows=1)\n', (6244, 6267), True, 'import pandas as 
pd\n'), ((6493, 6533), 'numpy.all', 'np.all', (['(label_train[..., :] == 0)'], {'axis': '(0)'}), '(label_train[..., :] == 0, axis=0)\n', (6499, 6533), True, 'import numpy as np\n'), ((6568, 6607), 'numpy.all', 'np.all', (['(label_test[..., :] == 0)'], {'axis': '(0)'}), '(label_test[..., :] == 0, axis=0)\n', (6574, 6607), True, 'import numpy as np\n'), ((13651, 13685), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'nrows': '(1)'}), '(DATASET_PATH, nrows=1)\n', (13662, 13685), True, 'import pandas as pd\n'), ((13919, 13959), 'numpy.all', 'np.all', (['(label_train[..., :] == 0)'], {'axis': '(0)'}), '(label_train[..., :] == 0, axis=0)\n', (13925, 13959), True, 'import numpy as np\n'), ((13994, 14033), 'numpy.all', 'np.all', (['(label_test[..., :] == 0)'], {'axis': '(0)'}), '(label_test[..., :] == 0, axis=0)\n', (14000, 14033), True, 'import numpy as np\n'), ((17159, 17193), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'nrows': '(1)'}), '(DATASET_PATH, nrows=1)\n', (17170, 17193), True, 'import pandas as pd\n'), ((17419, 17459), 'numpy.all', 'np.all', (['(label_train[..., :] == 0)'], {'axis': '(0)'}), '(label_train[..., :] == 0, axis=0)\n', (17425, 17459), True, 'import numpy as np\n'), ((17494, 17533), 'numpy.all', 'np.all', (['(label_test[..., :] == 0)'], {'axis': '(0)'}), '(label_test[..., :] == 0, axis=0)\n', (17500, 17533), True, 'import numpy as np\n'), ((3964, 3986), 'numpy.nonzero', 'np.nonzero', (['train_y[i]'], {}), '(train_y[i])\n', (3974, 3986), True, 'import numpy as np\n'), ((4742, 4763), 'numpy.nonzero', 'np.nonzero', (['test_y[i]'], {}), '(test_y[i])\n', (4752, 4763), True, 'import numpy as np\n'), ((6421, 6438), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '()\n', (6436, 6438), False, 'from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer\n'), ((9000, 9042), 'json.dump', 'json.dump', (['json_rep', 'f'], {'ensure_ascii': '(False)'}), '(json_rep, f, ensure_ascii=False)\n', (9009, 9042), False, 'import json\n'), ((9870, 9912), 'json.dump', 'json.dump', (['json_rep', 'f'], {'ensure_ascii': '(False)'}), '(json_rep, f, ensure_ascii=False)\n', (9879, 9912), False, 'import json\n'), ((13843, 13860), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '()\n', (13858, 13860), False, 'from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer\n'), ((17347, 17364), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {}), '()\n', (17362, 17364), False, 'from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer\n'), ((11137, 11159), 'numpy.nonzero', 'np.nonzero', (['train_y[i]'], {}), '(train_y[i])\n', (11147, 11159), True, 'import numpy as np\n'), ((11800, 11822), 'numpy.nonzero', 'np.nonzero', (['valid_y[i]'], {}), '(valid_y[i])\n', (11810, 11822), True, 'import numpy as np\n'), ((12457, 12478), 'numpy.nonzero', 'np.nonzero', (['test_y[i]'], {}), '(test_y[i])\n', (12467, 12478), True, 'import numpy as np\n')]
|
"""Contains a class representing a Vindinium map."""
import numpy as np
__all__ = ['Map']
class Map(object):
"""Represents static elements in the game.
Elements comprise walls, paths, taverns, mines and spawn points.
Attributes:
size (int): the board size (in a single axis).
"""
def __init__(self, size):
"""Constructor.
Args:
size (int): the board size.
"""
self.size = size
self._board = [[0 for i in range(size)] for j in range(size)]
def __getitem__(self, key):
"""Return an item in the map."""
return self._board[key[1]][key[0]]
def __setitem__(self, key, value):
"""Set an item in the map."""
self._board[key[1]][key[0]] = value
def __str__(self):
"""Pretty map."""
s = ' '
s += '-' * (self.size) + '\n'
for y in range(self.size):
s += '|'
for x in range(self.size):
s += str(self[x, y] or ' ')
s += '|\n'
s += ' ' + '-' * (self.size)
return s
def observe(self):
"""Return this map's board (basic representation)."""
return np.array(self._board)
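# A minimal usage sketch (added, not part of the original module); the tile
# codes below (1 for a wall, 2 for a tavern) are illustrative assumptions.
if __name__ == '__main__':
    game_map = Map(4)
    game_map[0, 0] = 1            # items are indexed as map[x, y]
    game_map[2, 3] = 2
    print(game_map)               # pretty-printed board
    print(game_map.observe())     # 4x4 numpy array of the raw codes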
|
[
"numpy.array"
] |
[((1186, 1207), 'numpy.array', 'np.array', (['self._board'], {}), '(self._board)\n', (1194, 1207), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : xds
# @wechat : 551883614
# @file: show_the_path.py
# @time: 2019-03-06 11:48:11
# @Software: PyCharm
import numpy as np
import math
# Takes the rotated end point, builds an intermediate transformation matrix, and returns a new matrix
def rotation_matrix(end_point):
"""
    Return a rotation matrix built from the direction of the given end point:
    the point is normalised, angles about the x, y and z axes are obtained
    with atan2, and the matrix is assembled from those angles.
"""
end_point = np.asarray(end_point)
end_point = end_point / math.sqrt(np.dot(end_point, end_point))
theta_y = math.atan2(end_point[0], end_point[2])
theta_x = math.atan2(end_point[1], end_point[2])
theta_z = math.atan2(end_point[1], end_point[0])
    r11 = math.cos(theta_y)*math.cos(theta_z) - math.sin(theta_x)*math.sin(theta_y)*math.sin(theta_z)
r12 = - math.cos(theta_x)*math.sin(theta_z)
r13 = math.sin(theta_y)*math.cos(theta_z) + math.sin(theta_x)*math.cos(theta_y)*math.sin(theta_z)
r21 = math.cos(theta_y)*math.sin(theta_z) + math.sin(theta_x)*math.sin(theta_y)*math.cos(theta_z)
r22 = math.cos(theta_x)*math.cos(theta_z)
r23 = math.sin(theta_y)*math.sin(theta_z) - math.sin(theta_x)*math.cos(theta_y)*math.cos(theta_z)
r31 = -math.cos(theta_x)*math.sin(theta_y)
r32 = math.sin(theta_x)
r33 = math.cos(theta_x)*math.cos(theta_y)
R = [[r11, r12, r13], [r21, r22, r23], [r31, r32, r33]]
return np.array(R)
# axis = np.asarray(axis)
# theta = np.asarray(theta)
# axis = axis / math.sqrt(np.dot(axis, axis))
# a = math.cos(theta / 2)
# b, c, d = -axis * math.sin(theta / 2)
# aa, bb, cc, dd = a * a, b * b, c * c, d * d
# bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
# return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],
# [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],
# [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])
if __name__ == '__main__':
v = [1, 1, 1]
axis = [1, 1, 1]
print(np.dot(rotation_matrix(axis), v))
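    # Sanity check (added sketch): R is assembled from elementary rotations,
    # so it should be (approximately) orthogonal with determinant +1.
    R = rotation_matrix(axis)
    print(np.allclose(R.T @ R, np.eye(3)), np.linalg.det(R))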
|
[
"math.atan2",
"numpy.asarray",
"math.sin",
"numpy.array",
"math.cos",
"numpy.dot"
] |
[((408, 429), 'numpy.asarray', 'np.asarray', (['end_point'], {}), '(end_point)\n', (418, 429), True, 'import numpy as np\n'), ((512, 550), 'math.atan2', 'math.atan2', (['end_point[0]', 'end_point[2]'], {}), '(end_point[0], end_point[2])\n', (522, 550), False, 'import math\n'), ((565, 603), 'math.atan2', 'math.atan2', (['end_point[1]', 'end_point[2]'], {}), '(end_point[1], end_point[2])\n', (575, 603), False, 'import math\n'), ((618, 656), 'math.atan2', 'math.atan2', (['end_point[1]', 'end_point[0]'], {}), '(end_point[1], end_point[0])\n', (628, 656), False, 'import math\n'), ((1216, 1233), 'math.sin', 'math.sin', (['theta_x'], {}), '(theta_x)\n', (1224, 1233), False, 'import math\n'), ((1351, 1362), 'numpy.array', 'np.array', (['R'], {}), '(R)\n', (1359, 1362), True, 'import numpy as np\n'), ((789, 806), 'math.sin', 'math.sin', (['theta_z'], {}), '(theta_z)\n', (797, 806), False, 'import math\n'), ((1021, 1038), 'math.cos', 'math.cos', (['theta_x'], {}), '(theta_x)\n', (1029, 1038), False, 'import math\n'), ((1039, 1056), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (1047, 1056), False, 'import math\n'), ((1188, 1205), 'math.sin', 'math.sin', (['theta_y'], {}), '(theta_y)\n', (1196, 1205), False, 'import math\n'), ((1244, 1261), 'math.cos', 'math.cos', (['theta_x'], {}), '(theta_x)\n', (1252, 1261), False, 'import math\n'), ((1262, 1279), 'math.cos', 'math.cos', (['theta_y'], {}), '(theta_y)\n', (1270, 1279), False, 'import math\n'), ((468, 496), 'numpy.dot', 'np.dot', (['end_point', 'end_point'], {}), '(end_point, end_point)\n', (474, 496), True, 'import numpy as np\n'), ((667, 684), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (675, 684), False, 'import math\n'), ((685, 702), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (693, 702), False, 'import math\n'), ((741, 758), 'math.sin', 'math.sin', (['theta_z'], {}), '(theta_z)\n', (749, 758), False, 'import math\n'), ((771, 788), 'math.cos', 'math.cos', (['theta_x'], {}), '(theta_x)\n', (779, 788), False, 'import math\n'), ((817, 834), 'math.sin', 'math.sin', (['theta_y'], {}), '(theta_y)\n', (825, 834), False, 'import math\n'), ((835, 852), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (843, 852), False, 'import math\n'), ((891, 908), 'math.sin', 'math.sin', (['theta_z'], {}), '(theta_z)\n', (899, 908), False, 'import math\n'), ((919, 936), 'math.cos', 'math.cos', (['theta_y'], {}), '(theta_y)\n', (927, 936), False, 'import math\n'), ((937, 954), 'math.sin', 'math.sin', (['theta_z'], {}), '(theta_z)\n', (945, 954), False, 'import math\n'), ((993, 1010), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (1001, 1010), False, 'import math\n'), ((1067, 1084), 'math.sin', 'math.sin', (['theta_y'], {}), '(theta_y)\n', (1075, 1084), False, 'import math\n'), ((1085, 1102), 'math.sin', 'math.sin', (['theta_z'], {}), '(theta_z)\n', (1093, 1102), False, 'import math\n'), ((1141, 1158), 'math.cos', 'math.cos', (['theta_z'], {}), '(theta_z)\n', (1149, 1158), False, 'import math\n'), ((1170, 1187), 'math.cos', 'math.cos', (['theta_x'], {}), '(theta_x)\n', (1178, 1187), False, 'import math\n'), ((705, 722), 'math.sin', 'math.sin', (['theta_x'], {}), '(theta_x)\n', (713, 722), False, 'import math\n'), ((723, 740), 'math.sin', 'math.sin', (['theta_y'], {}), '(theta_y)\n', (731, 740), False, 'import math\n'), ((855, 872), 'math.sin', 'math.sin', (['theta_x'], {}), '(theta_x)\n', (863, 872), False, 'import math\n'), ((873, 890), 'math.cos', 'math.cos', (['theta_y'], {}), '(theta_y)\n', (881, 890), 
False, 'import math\n'), ((957, 974), 'math.sin', 'math.sin', (['theta_x'], {}), '(theta_x)\n', (965, 974), False, 'import math\n'), ((975, 992), 'math.sin', 'math.sin', (['theta_y'], {}), '(theta_y)\n', (983, 992), False, 'import math\n'), ((1105, 1122), 'math.sin', 'math.sin', (['theta_x'], {}), '(theta_x)\n', (1113, 1122), False, 'import math\n'), ((1123, 1140), 'math.cos', 'math.cos', (['theta_y'], {}), '(theta_y)\n', (1131, 1140), False, 'import math\n')]
|
import numpy as np
import cv2, time, multiprocessing
import matplotlib.pyplot as plt
def transform(x, y, orgX, orgY):
c = complex(x - orgX, y - orgY)
return c ** 1.2
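# (Added note) transform() treats each pixel as a complex number relative to
# the image centre (orgX, orgY) and raises it to the power 1.2; in polar form
# r*exp(i*t) -> r**1.2 * exp(i*1.2*t), so radii are stretched and angles are
# scaled, producing the warp that toMatrix() renders back into an image.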
const = np.array([256, 256, 256], np.int16)
def toMatrix(newDict):
global const
arrs = newDict.keys()
xRange = max(arrs, key=lambda x: x[0])[0] - min(arrs, key=lambda x: x[0])[0]
yRange = max(arrs, key=lambda x: x[1])[1] - min(arrs, key=lambda x: x[1])[1]
print("Rendering image of size {}x{}...".format(xRange, yRange))
shiftX = xRange // 2
shiftY = yRange // 2
imgArr = np.zeros((yRange, xRange, 3), np.int16)
for x in range(xRange):
for y in range(yRange):
imgArr[y, x, :] = np.array(newDict.get((x - shiftX, y - shiftY), [255, 255, 255]), np.int16)
return const - imgArr
def bgrTorgb(img):
img_rgb = np.zeros(img.shape, img.dtype)
img_rgb[:, :, 0] = img[:, :, 2]
img_rgb[:, :, 1] = img[:, :, 1]
img_rgb[:, :, 2] = img[:, :, 0]
return img_rgb
def show(ori, img):
plt.subplot(121)
plt.title('Original Image')
plt.imshow(bgrTorgb(ori))
plt.subplot(122)
plt.title('Destination Image')
plt.imshow(bgrTorgb(img))
plt.show()
def avPixels(newImg, m, n, bgr, c):
a = round(m)
b = round(n)
for i in range(a - c, a + c):
for j in range(b - c, b + c):
if newImg.get((i, j)) is None:
newImg[(i, j)] = bgr
def calculateSparseArray(img, wStart, wEnd, h, orgX, orgY, kernel):
c = kernel // 2
newImg = {}
for x in range(wStart, wEnd):
for y in range(h):
xy = transform(x, y, orgX, orgY)
avPixels(newImg, xy.real, xy.imag, img[y, x, :], c)
return newImg
def main():
img = cv2.imread("pics/5.png")
t = time.clock()
height, width = img.shape[0:2]
orgX, orgY = (width // 2, height // 2)
kernel = 7
threads = 6
wPart = width // threads
results = []
pool = multiprocessing.Pool(processes=threads)
for i in range(threads - 1):
results.append(
pool.apply_async(calculateSparseArray, (img, wPart * i, wPart * (i + 1), height, orgX, orgY, kernel,)))
results.append(
pool.apply_async(calculateSparseArray, (img, wPart * (threads - 1), width, height, orgX, orgY, kernel,)))
pool.close()
pool.join()
print('It takes {}s to calculate the sparse matrices, with kernel of size {}x{}, using {} threads'
.format(round(time.clock() - t, 2), kernel, kernel, threads))
t = time.clock()
d1 = results[0].get()
for i in range(1, len(results)):
d1.update(results[i].get())
print('It takes {}s to merge the sparse matrices'.format(round(time.clock() - t, 2)))
t = time.clock()
imgArr = toMatrix(d1)
print('It takes {}s to convert sparse matrices to a complete numpy three dimensional array'.format(
round(time.clock() - t, 2)))
show(img, imgArr)
if __name__ == "__main__":
main()
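# (Added note) time.clock() above was deprecated in Python 3.3 and removed in
# Python 3.8; on newer interpreters the same timing pattern would use a
# monotonic clock, e.g.:
#
#   t = time.perf_counter()
#   ...                                # section being timed
#   elapsed = time.perf_counter() - t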
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"numpy.zeros",
"time.clock",
"cv2.imread",
"numpy.array",
"multiprocessing.Pool"
] |
[((186, 221), 'numpy.array', 'np.array', (['[256, 256, 256]', 'np.int16'], {}), '([256, 256, 256], np.int16)\n', (194, 221), True, 'import numpy as np\n'), ((582, 621), 'numpy.zeros', 'np.zeros', (['(yRange, xRange, 3)', 'np.int16'], {}), '((yRange, xRange, 3), np.int16)\n', (590, 621), True, 'import numpy as np\n'), ((848, 878), 'numpy.zeros', 'np.zeros', (['img.shape', 'img.dtype'], {}), '(img.shape, img.dtype)\n', (856, 878), True, 'import numpy as np\n'), ((1032, 1048), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(121)'], {}), '(121)\n', (1043, 1048), True, 'import matplotlib.pyplot as plt\n'), ((1053, 1080), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (1062, 1080), True, 'import matplotlib.pyplot as plt\n'), ((1115, 1131), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (1126, 1131), True, 'import matplotlib.pyplot as plt\n'), ((1136, 1166), 'matplotlib.pyplot.title', 'plt.title', (['"""Destination Image"""'], {}), "('Destination Image')\n", (1145, 1166), True, 'import matplotlib.pyplot as plt\n'), ((1201, 1211), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1209, 1211), True, 'import matplotlib.pyplot as plt\n'), ((1753, 1777), 'cv2.imread', 'cv2.imread', (['"""pics/5.png"""'], {}), "('pics/5.png')\n", (1763, 1777), False, 'import cv2, time, multiprocessing\n'), ((1786, 1798), 'time.clock', 'time.clock', ([], {}), '()\n', (1796, 1798), False, 'import cv2, time, multiprocessing\n'), ((1965, 2004), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {'processes': 'threads'}), '(processes=threads)\n', (1985, 2004), False, 'import cv2, time, multiprocessing\n'), ((2528, 2540), 'time.clock', 'time.clock', ([], {}), '()\n', (2538, 2540), False, 'import cv2, time, multiprocessing\n'), ((2738, 2750), 'time.clock', 'time.clock', ([], {}), '()\n', (2748, 2750), False, 'import cv2, time, multiprocessing\n'), ((2472, 2484), 'time.clock', 'time.clock', ([], {}), '()\n', (2482, 2484), False, 'import cv2, time, multiprocessing\n'), ((2707, 2719), 'time.clock', 'time.clock', ([], {}), '()\n', (2717, 2719), False, 'import cv2, time, multiprocessing\n'), ((2895, 2907), 'time.clock', 'time.clock', ([], {}), '()\n', (2905, 2907), False, 'import cv2, time, multiprocessing\n')]
|
import unittest
from numpy import array
from lda import VariationalBayes
from scipy.special import psi as digam
from math import exp
class TestVB(unittest.TestCase):
def setUp(self):
self.init_beta = array([[.26, .185, .185, .185, .185],
[.185, .185, .26, .185, .185],
[.185, .185, .185, .26, .185]])
def test_single_phi(self):
vb = VariationalBayes()
gamma = array([2.0, 2.0, 2.0])
beta = self.init_beta
phi = vb.new_phi(gamma, beta, 0, 1)
prop = 0.27711205238850234
normalizer = sum(x * prop for x in beta[:, 0])
self.assertAlmostEqual(phi[0], beta[0][0] * prop / normalizer)
self.assertAlmostEqual(phi[1], beta[1][0] * prop / normalizer)
self.assertAlmostEqual(phi[2], beta[2][0] * prop / normalizer)
def test_multiple_phi(self):
vb = VariationalBayes()
gamma = array([2.0, 2.0, 2.0])
beta = self.init_beta
phi = vb.new_phi(gamma, beta, 0, 2)
prop = 0.27711205238850234
normalizer = sum(x * prop for x in beta[:, 0]) / 2.0
self.assertAlmostEqual(phi[0], beta[0][0] * prop / normalizer)
self.assertAlmostEqual(phi[1], beta[1][0] * prop / normalizer)
self.assertAlmostEqual(phi[2], beta[2][0] * prop / normalizer)
def test_m(self):
vb = VariationalBayes()
vb.init([], "stuck", 3)
topic_count = array([[5., 4., 3., 2., 1.],
[0., 2., 2., 4., 1.],
[1., 1., 1., 1., 1.]])
new_beta = vb.m_step(topic_count)
self.assertAlmostEqual(new_beta[2][3], .2)
self.assertAlmostEqual(new_beta[0][0], .33333333)
self.assertAlmostEqual(new_beta[1][4], .11111111)
self.assertAlmostEqual(new_beta[0][3], .13333333)
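# Worked check for test_m (added note): m_step is expected to normalise each
# row of topic_count, so
#   row 0: [5, 4, 3, 2, 1] / 15 -> beta[0][0] = 5/15 ~ 0.3333, beta[0][3] = 2/15 ~ 0.1333
#   row 1: [0, 2, 2, 4, 1] / 9  -> beta[1][4] = 1/9 ~ 0.1111
#   row 2: [1, 1, 1, 1, 1] / 5  -> beta[2][3] = 1/5 = 0.2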
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"numpy.array",
"lda.VariationalBayes"
] |
[((1896, 1911), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1909, 1911), False, 'import unittest\n'), ((216, 336), 'numpy.array', 'array', (['[[0.26, 0.185, 0.185, 0.185, 0.185], [0.185, 0.185, 0.26, 0.185, 0.185], [\n 0.185, 0.185, 0.185, 0.26, 0.185]]'], {}), '([[0.26, 0.185, 0.185, 0.185, 0.185], [0.185, 0.185, 0.26, 0.185, \n 0.185], [0.185, 0.185, 0.185, 0.26, 0.185]])\n', (221, 336), False, 'from numpy import array\n'), ((426, 444), 'lda.VariationalBayes', 'VariationalBayes', ([], {}), '()\n', (442, 444), False, 'from lda import VariationalBayes\n'), ((462, 484), 'numpy.array', 'array', (['[2.0, 2.0, 2.0]'], {}), '([2.0, 2.0, 2.0])\n', (467, 484), False, 'from numpy import array\n'), ((910, 928), 'lda.VariationalBayes', 'VariationalBayes', ([], {}), '()\n', (926, 928), False, 'from lda import VariationalBayes\n'), ((946, 968), 'numpy.array', 'array', (['[2.0, 2.0, 2.0]'], {}), '([2.0, 2.0, 2.0])\n', (951, 968), False, 'from numpy import array\n'), ((1390, 1408), 'lda.VariationalBayes', 'VariationalBayes', ([], {}), '()\n', (1406, 1408), False, 'from lda import VariationalBayes\n'), ((1464, 1556), 'numpy.array', 'array', (['[[5.0, 4.0, 3.0, 2.0, 1.0], [0.0, 2.0, 2.0, 4.0, 1.0], [1.0, 1.0, 1.0, 1.0,\n 1.0]]'], {}), '([[5.0, 4.0, 3.0, 2.0, 1.0], [0.0, 2.0, 2.0, 4.0, 1.0], [1.0, 1.0, 1.0,\n 1.0, 1.0]])\n', (1469, 1556), False, 'from numpy import array\n')]
|
import math
import cv2
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist
def load_img(path):
img = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
return img
def preprocess_img(img, size, invert_colors=False):
if invert_colors:
img = cv2.bitwise_not(img)
img = cv2.resize(img, dsize=size, interpolation=cv2.INTER_CUBIC)
img = img / 255
img = np.expand_dims(img, axis=0)
img = np.expand_dims(img, axis=3)
return img
def load_data():
(X_train, Y_train), (X_test, Y_test) = mnist.load_data()
return X_train, Y_train, X_test, Y_test
def visualize_data(X, Y, plot_name):
plt.subplot(221)
plt.imshow(X[0], cmap=plt.get_cmap("gray"))
plt.title("y: " + str(Y[0]))
plt.subplot(222)
plt.imshow(X[1], cmap=plt.get_cmap("gray"))
plt.title("y: " + str(Y[1]))
plt.subplot(223)
plt.imshow(X[2], cmap=plt.get_cmap("gray"))
plt.title("y: " + str(Y[2]))
plt.subplot(224)
plt.imshow(X[3], cmap=plt.get_cmap("gray"))
plt.title("y: " + str(Y[3]))
plt.savefig("output/" + plot_name, bbox_inches="tight")
plt.clf()
def preprocess_data(X_train, Y_train, X_test, Y_test):
# Normalize image pixel values from 0-255 to 0-1
X_train = X_train / 255
X_test = X_test / 255
# Change y values from 0-9 to one hot vectors
Y_train = convert_to_one_hot(Y_train)
Y_test = convert_to_one_hot(Y_test)
# Add channels dimension
X_train = np.expand_dims(X_train, axis=3)
X_test = np.expand_dims(X_test, axis=3)
return X_train, Y_train, X_test, Y_test
def convert_to_one_hot(Y):
Y_onehot = np.zeros((len(Y), Y.max() + 1))
Y_onehot[np.arange(len(Y)), Y] = 1
return Y_onehot
def random_mini_batches(X_train, Y_train, mini_batch_size):
mini_batches = []
m = X_train.shape[0] # Number of training examples
# Shuffle training examples
permutation = list(np.random.permutation(m))
X_shuffled = X_train[permutation]
Y_shuffled = Y_train[permutation]
# Partition into mini-batches
num_complete_mini_batches = math.floor(m / mini_batch_size)
for i in range(num_complete_mini_batches):
X_mini_batch = X_shuffled[i * mini_batch_size : (i + 1) * mini_batch_size]
Y_mini_batch = Y_shuffled[i * mini_batch_size : (i + 1) * mini_batch_size]
mini_batch = (X_mini_batch, Y_mini_batch)
mini_batches.append(mini_batch)
# Handling the case that the last mini-batch < mini_batch_size
if m % mini_batch_size != 0:
X_mini_batch = X_shuffled[num_complete_mini_batches * mini_batch_size : m]
Y_mini_batch = Y_shuffled[num_complete_mini_batches * mini_batch_size : m]
mini_batch = (X_mini_batch, Y_mini_batch)
mini_batches.append(mini_batch)
return mini_batches
def compute_accuracy(Y_pred, Y_real):
Y_pred = np.argmax(Y_pred, axis=1)
Y_real = np.argmax(Y_real, axis=1)
num_correct = np.sum(Y_pred == Y_real)
accuracy = num_correct / Y_real.shape[0]
return accuracy
def compute_cost(Y, Y_hat):
# Add small value epsilon to tf.log() calls to avoid taking the log of 0
epsilon = 1e-10
J = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(Y_hat + epsilon), axis=1), name="J")
return J
def create_model(height, width, channels, num_classes):
tf.reset_default_graph()
X = tf.placeholder(dtype=tf.float32, shape=(None, height, width, channels), name="X")
Y = tf.placeholder(dtype=tf.float32, shape=(None, num_classes), name="Y")
training_flag = tf.placeholder_with_default(False, shape=())
conv1 = tf.layers.conv2d(X, filters=32, kernel_size=5, strides=1, padding="same", activation=tf.nn.relu)
pool1 = tf.layers.max_pooling2d(conv1, pool_size=2, strides=2, padding="valid")
# Dropout does not apply by default, training=True is needed to make the layer do anything
# We only want dropout applied during training
dropout = tf.layers.dropout(pool1, rate=0.2, training=training_flag)
flatten = tf.layers.flatten(dropout)
dense1 = tf.layers.dense(flatten, 128, activation=tf.nn.relu)
Y_hat = tf.layers.dense(dense1, num_classes, activation=tf.nn.softmax, name="Y_hat")
# Compute cost
J = compute_cost(Y, Y_hat)
return X, Y, training_flag, Y_hat, J
def run_model(X, Y, training_flag, Y_hat, J, X_train, Y_train, X_test, Y_test, mini_batches, LEARNING_RATE, NUM_EPOCHS):
# Create train op
optimizer = tf.train.AdamOptimizer(LEARNING_RATE)
train_op = optimizer.minimize(J)
# Start session
with tf.Session() as sess:
# Initialize variables
sess.run(tf.global_variables_initializer())
# Training loop
for epoch in range(NUM_EPOCHS):
for (X_mini_batch, Y_mini_batch) in mini_batches:
_, J_train = sess.run([train_op, J], feed_dict={X: X_mini_batch, Y: Y_mini_batch, training_flag: True})
print("epoch: " + str(epoch) + ", J_train: " + str(J_train))
# Final costs
J_train = sess.run(J, feed_dict={X: X_train, Y: Y_train})
J_test = sess.run(J, feed_dict={X: X_test, Y: Y_test})
# Compute training accuracy
Y_pred = sess.run(Y_hat, feed_dict={X: X_train, Y: Y_train})
accuracy_train = compute_accuracy(Y_pred, Y_train)
# Compute test accuracy
Y_pred = sess.run(Y_hat, feed_dict={X: X_test, Y: Y_test})
accuracy_test = compute_accuracy(Y_pred, Y_test)
# Save model
tf.saved_model.simple_save(sess, "saved_model", inputs={"X": X, "Y": Y}, outputs={"Y_hat": Y_hat})
return J_train, J_test, accuracy_train, accuracy_test
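# A minimal end-to-end sketch (added, not part of the original script); the
# batch size, learning rate and epoch count are illustrative choices only.
if __name__ == "__main__":
    X_train, Y_train, X_test, Y_test = load_data()
    X_train, Y_train, X_test, Y_test = preprocess_data(X_train, Y_train, X_test, Y_test)
    mini_batches = random_mini_batches(X_train, Y_train, mini_batch_size=64)
    X, Y, training_flag, Y_hat, J = create_model(28, 28, 1, 10)
    J_train, J_test, acc_train, acc_test = run_model(
        X, Y, training_flag, Y_hat, J,
        X_train, Y_train, X_test, Y_test,
        mini_batches, LEARNING_RATE=0.001, NUM_EPOCHS=5)
    print("train/test cost:", J_train, J_test, "train/test accuracy:", acc_train, acc_test)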
|
[
"numpy.sum",
"matplotlib.pyplot.clf",
"numpy.argmax",
"tensorflow.reset_default_graph",
"tensorflow.layers.max_pooling2d",
"tensorflow.saved_model.simple_save",
"tensorflow.placeholder_with_default",
"tensorflow.placeholder",
"cv2.resize",
"cv2.bitwise_not",
"matplotlib.pyplot.get_cmap",
"tensorflow.global_variables_initializer",
"tensorflow.layers.dropout",
"tensorflow.layers.flatten",
"tensorflow.Session",
"tensorflow.layers.conv2d",
"tensorflow.log",
"numpy.random.permutation",
"matplotlib.pyplot.subplot",
"tensorflow.layers.dense",
"math.floor",
"numpy.expand_dims",
"tensorflow.keras.datasets.mnist.load_data",
"cv2.imread",
"tensorflow.train.AdamOptimizer",
"matplotlib.pyplot.savefig"
] |
[((173, 211), 'cv2.imread', 'cv2.imread', (['path', 'cv2.IMREAD_GRAYSCALE'], {}), '(path, cv2.IMREAD_GRAYSCALE)\n', (183, 211), False, 'import cv2\n'), ((348, 406), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': 'size', 'interpolation': 'cv2.INTER_CUBIC'}), '(img, dsize=size, interpolation=cv2.INTER_CUBIC)\n', (358, 406), False, 'import cv2\n'), ((437, 464), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (451, 464), True, 'import numpy as np\n'), ((475, 502), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(3)'}), '(img, axis=3)\n', (489, 502), True, 'import numpy as np\n'), ((580, 597), 'tensorflow.keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (595, 597), False, 'from tensorflow.keras.datasets import mnist\n'), ((685, 701), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(221)'], {}), '(221)\n', (696, 701), True, 'import matplotlib.pyplot as plt\n'), ((787, 803), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(222)'], {}), '(222)\n', (798, 803), True, 'import matplotlib.pyplot as plt\n'), ((889, 905), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(223)'], {}), '(223)\n', (900, 905), True, 'import matplotlib.pyplot as plt\n'), ((991, 1007), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(224)'], {}), '(224)\n', (1002, 1007), True, 'import matplotlib.pyplot as plt\n'), ((1093, 1148), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('output/' + plot_name)"], {'bbox_inches': '"""tight"""'}), "('output/' + plot_name, bbox_inches='tight')\n", (1104, 1148), True, 'import matplotlib.pyplot as plt\n'), ((1153, 1162), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1160, 1162), True, 'import matplotlib.pyplot as plt\n'), ((1503, 1534), 'numpy.expand_dims', 'np.expand_dims', (['X_train'], {'axis': '(3)'}), '(X_train, axis=3)\n', (1517, 1534), True, 'import numpy as np\n'), ((1548, 1578), 'numpy.expand_dims', 'np.expand_dims', (['X_test'], {'axis': '(3)'}), '(X_test, axis=3)\n', (1562, 1578), True, 'import numpy as np\n'), ((2122, 2153), 'math.floor', 'math.floor', (['(m / mini_batch_size)'], {}), '(m / mini_batch_size)\n', (2132, 2153), False, 'import math\n'), ((2891, 2916), 'numpy.argmax', 'np.argmax', (['Y_pred'], {'axis': '(1)'}), '(Y_pred, axis=1)\n', (2900, 2916), True, 'import numpy as np\n'), ((2930, 2955), 'numpy.argmax', 'np.argmax', (['Y_real'], {'axis': '(1)'}), '(Y_real, axis=1)\n', (2939, 2955), True, 'import numpy as np\n'), ((2974, 2998), 'numpy.sum', 'np.sum', (['(Y_pred == Y_real)'], {}), '(Y_pred == Y_real)\n', (2980, 2998), True, 'import numpy as np\n'), ((3352, 3376), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (3374, 3376), True, 'import tensorflow as tf\n'), ((3386, 3471), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': 'tf.float32', 'shape': '(None, height, width, channels)', 'name': '"""X"""'}), "(dtype=tf.float32, shape=(None, height, width, channels),\n name='X')\n", (3400, 3471), True, 'import tensorflow as tf\n'), ((3476, 3545), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': 'tf.float32', 'shape': '(None, num_classes)', 'name': '"""Y"""'}), "(dtype=tf.float32, shape=(None, num_classes), name='Y')\n", (3490, 3545), True, 'import tensorflow as tf\n'), ((3566, 3610), 'tensorflow.placeholder_with_default', 'tf.placeholder_with_default', (['(False)'], {'shape': '()'}), '(False, shape=())\n', (3593, 3610), True, 'import tensorflow as tf\n'), ((3624, 3724), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['X'], {'filters': '(32)', 
'kernel_size': '(5)', 'strides': '(1)', 'padding': '"""same"""', 'activation': 'tf.nn.relu'}), "(X, filters=32, kernel_size=5, strides=1, padding='same',\n activation=tf.nn.relu)\n", (3640, 3724), True, 'import tensorflow as tf\n'), ((3733, 3804), 'tensorflow.layers.max_pooling2d', 'tf.layers.max_pooling2d', (['conv1'], {'pool_size': '(2)', 'strides': '(2)', 'padding': '"""valid"""'}), "(conv1, pool_size=2, strides=2, padding='valid')\n", (3756, 3804), True, 'import tensorflow as tf\n'), ((3965, 4023), 'tensorflow.layers.dropout', 'tf.layers.dropout', (['pool1'], {'rate': '(0.2)', 'training': 'training_flag'}), '(pool1, rate=0.2, training=training_flag)\n', (3982, 4023), True, 'import tensorflow as tf\n'), ((4039, 4065), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['dropout'], {}), '(dropout)\n', (4056, 4065), True, 'import tensorflow as tf\n'), ((4079, 4131), 'tensorflow.layers.dense', 'tf.layers.dense', (['flatten', '(128)'], {'activation': 'tf.nn.relu'}), '(flatten, 128, activation=tf.nn.relu)\n', (4094, 4131), True, 'import tensorflow as tf\n'), ((4144, 4220), 'tensorflow.layers.dense', 'tf.layers.dense', (['dense1', 'num_classes'], {'activation': 'tf.nn.softmax', 'name': '"""Y_hat"""'}), "(dense1, num_classes, activation=tf.nn.softmax, name='Y_hat')\n", (4159, 4220), True, 'import tensorflow as tf\n'), ((4474, 4511), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['LEARNING_RATE'], {}), '(LEARNING_RATE)\n', (4496, 4511), True, 'import tensorflow as tf\n'), ((317, 337), 'cv2.bitwise_not', 'cv2.bitwise_not', (['img'], {}), '(img)\n', (332, 337), False, 'import cv2\n'), ((1953, 1977), 'numpy.random.permutation', 'np.random.permutation', (['m'], {}), '(m)\n', (1974, 1977), True, 'import numpy as np\n'), ((4579, 4591), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (4589, 4591), True, 'import tensorflow as tf\n'), ((5508, 5610), 'tensorflow.saved_model.simple_save', 'tf.saved_model.simple_save', (['sess', '"""saved_model"""'], {'inputs': "{'X': X, 'Y': Y}", 'outputs': "{'Y_hat': Y_hat}"}), "(sess, 'saved_model', inputs={'X': X, 'Y': Y},\n outputs={'Y_hat': Y_hat})\n", (5534, 5610), True, 'import tensorflow as tf\n'), ((728, 748), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (740, 748), True, 'import matplotlib.pyplot as plt\n'), ((830, 850), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (842, 850), True, 'import matplotlib.pyplot as plt\n'), ((932, 952), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (944, 952), True, 'import matplotlib.pyplot as plt\n'), ((1034, 1054), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (1046, 1054), True, 'import matplotlib.pyplot as plt\n'), ((4649, 4682), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (4680, 4682), True, 'import tensorflow as tf\n'), ((3233, 3256), 'tensorflow.log', 'tf.log', (['(Y_hat + epsilon)'], {}), '(Y_hat + epsilon)\n', (3239, 3256), True, 'import tensorflow as tf\n')]
|
""" Data helper functions """
import os
import random
import numpy as np
import PIL
from tensorflow.keras.preprocessing.image import load_img, img_to_array
from tensorflow.keras.utils import to_categorical
def load_data(directory, classes, rescale=True, preprocess=None, verbose=False):
""" Helper function to load data in a Keras friendly format """
if not os.path.exists(directory):
raise FileNotFoundError(directory + ' not found')
# Get directories
directories = os.listdir(directory)
# Count files
num_images = 0
for d in directories:
if d not in classes or not os.path.isdir(os.path.join(directory, d)):
continue
num_images += len([name for name in os.listdir(os.path.join(directory, d)) if os.path.isfile(os.path.join(os.path.join(directory, d), name))])
# Create numpy array with the correct size (pending actually loading images)
x = np.empty((num_images, 256, 256, 3), dtype='float32')
y = list()
filen = 0
failed = 0
for d in directories:
# Skip any class directories we don't want
if d not in classes or not os.path.isdir(os.path.join(directory, d)):
if verbose:
print('Skipping', d)
continue
if verbose:
print('Loading directory', d)
for f in os.listdir(os.path.join(directory, d)):
try:
# Load image
img = load_img(
os.path.join(os.path.join(directory, d), f),
color_mode='rgb',
target_size=[256, 256]
)
except PIL.UnidentifiedImageError:
failed += 1
if verbose:
print('Failed to load {}'.format(os.path.join(os.path.join(directory, d), f)))
continue
# Convert to numpy array
img = img_to_array(img)
# Apply any preprocess function and rescaling
if preprocess is not None:
img = preprocess(img)
else:
if rescale:
img /= 255
# Append label to y
y.append(classes.index(d))
# Save img to x
x[filen, ...] = img
# Increment img number
filen += 1
# Remove empty rows of x due to failed image reads
if failed > 0:
x = x[:-failed,...]
# Convert y to categorical
y = to_categorical(y)
return x, y, 0
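# A minimal usage sketch (added); "data/train" and the class names below are
# placeholder assumptions for a directory laid out as one sub-folder per class.
if __name__ == "__main__":
    x, y, _ = load_data("data/train", classes=["cat", "dog"], verbose=True)
    print(x.shape, y.shape)  # (N, 256, 256, 3) and (N, num_classes)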
|
[
"tensorflow.keras.utils.to_categorical",
"numpy.empty",
"tensorflow.keras.preprocessing.image.img_to_array",
"os.path.exists",
"os.path.join",
"os.listdir"
] |
[((495, 516), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (505, 516), False, 'import os\n'), ((921, 973), 'numpy.empty', 'np.empty', (['(num_images, 256, 256, 3)'], {'dtype': '"""float32"""'}), "((num_images, 256, 256, 3), dtype='float32')\n", (929, 973), True, 'import numpy as np\n'), ((2488, 2505), 'tensorflow.keras.utils.to_categorical', 'to_categorical', (['y'], {}), '(y)\n', (2502, 2505), False, 'from tensorflow.keras.utils import to_categorical\n'), ((369, 394), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (383, 394), False, 'import os\n'), ((1348, 1374), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (1360, 1374), False, 'import os\n'), ((1902, 1919), 'tensorflow.keras.preprocessing.image.img_to_array', 'img_to_array', (['img'], {}), '(img)\n', (1914, 1919), False, 'from tensorflow.keras.preprocessing.image import load_img, img_to_array\n'), ((630, 656), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (642, 656), False, 'import os\n'), ((1145, 1171), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (1157, 1171), False, 'import os\n'), ((735, 761), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (747, 761), False, 'import os\n'), ((1488, 1514), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (1500, 1514), False, 'import os\n'), ((794, 820), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (806, 820), False, 'import os\n'), ((1788, 1814), 'os.path.join', 'os.path.join', (['directory', 'd'], {}), '(directory, d)\n', (1800, 1814), False, 'import os\n')]
|
"""
GPMF
A Generalized Poroelastic Model using FEniCS
Code for a fully-coupled poroelastic formulation: This solves the three
PDEs (conservation of mass, darcy's law, conservation of momentum) using
a Mixed Finite Element approach (i.e. a monolithic solve). The linear
model uses a constant fluid density and porosity. The nonlinear model uses
a d(phi)/dt porosity model to update porosity in each time step, which is
derived from solid continuity assuming small strains. Fluid density is also
allowed to vary.
Primary unknowns are considered to be perturbations from the litho- and
hydro-static conditions, as is common in poroelasticity. Pressure or
stress/strain dependent variables are related to either the perturbation
or the total value (i.e. perturbation + static condition).
Refer to the User Guide for more information on each section of the code
below.
"""
######### INITIALIZE CODE #################################################
from fenics import *
import numpy as np
import ufl
from time import gmtime, strftime
ufl.algorithms.apply_derivatives.CONDITIONAL_WORKAROUND = True
if MPI.rank(MPI.comm_world) == 0:
print( "Start date and time:")
print( strftime("%Y-%m-%d %H:%M:%S", gmtime()))
###########################################################################
######### DEFINE MODEL TYPE ###############################################
Linear = 0
Nonlinear = 1
if Linear + Nonlinear != 1:
if MPI.rank(MPI.comm_world) == 0:
print("You must choose a model type.")
quit()
if Linear == 1:
if MPI.rank(MPI.comm_world) == 0:
print("Running the Linear Poroelastic Model.")
weight = Constant(0.0)
linear_flag = 1
if Nonlinear == 1:
if MPI.rank(MPI.comm_world) == 0:
print( "Running the Nonlinear Poroelastic Model.")
weight = Constant(1.0)
linear_flag = 0
###########################################################################
######### DOMAIN AND SUBDOMAINS ###########################################
### Domain Constants ###
x0 = 0.0 # Minimum x
x1 = 1.0 # Maximum x
y0 = 0.0 # Minimum y
y1 = 1.0 # Maximum y
z0 = 0.0 # Minimum z
z1 = 1.0 # Maximum z
nx = 5 # Number of cells in x-direction
ny = 5 # Number of cells in y-direction
nz = 5 # Number of cells in z-direction
###########################################################################
######### MESHING #########################################################
if MPI.rank(MPI.comm_world) == 0:
print ('Building mesh...')
mesh = BoxMesh(Point(x0,y0,z0),Point(x1,y1,z1),nx,ny,nz)
###########################################################################
######### FUNCTION SPACES #################################################
### Field variable function spaces ###
ele_p = FiniteElement("DG", mesh.ufl_cell(), 0) # pressure
ele_q = FiniteElement("BDM", mesh.ufl_cell(), 1) # fluid flux
ele_us = VectorElement("CG", mesh.ufl_cell(), 1) # solid displacement
W = MixedElement([ele_p, ele_q, ele_us])
W = FunctionSpace(mesh, W)
### Function spaces for other variables ###
Z = FunctionSpace(mesh, "CG", 1)
V = VectorFunctionSpace(mesh, "DG", 0)
S = TensorFunctionSpace(mesh, "DG", 0)
P = FunctionSpace(mesh, "DG", 0)
###########################################################################
######### DEFINE PARAMETERS AND RELATIONSHIPS #############################
### Physical Constants ###
g = 9.81 # Gravity (m/sec^2)
### Hydraulic Parameters ###
k = 1.0 # Permeability (m^2)
mu = 1.0 # Viscosity (Pa*s)
phi_0 = 0.5 # Initial Porosity (-)
phi_min = 0.01 # Minimum allowed Porosity
p_0 = 0.0 # Reference Pressure (Pa)
rho_0 = 1.0 # Reference Density (kg/m^3)
### Mechanical Parameters ###
beta_m = 1.0 # Matrix Compressibility (Pa^-1)
beta_f = 1.0 # Fluid Compressibility (Pa^-1)
beta_s = 1.0 # Solid Compressibility (Pa^-1)
nu = 0.25 # Poisson's Ratio (-)
K = beta_m**(-1) # Bulk Modulus (Pa)
G = 3.0/2.0*K*(1.0-2.0*nu)/(1.0+nu) # Shear Modulus (Pa)
alpha = 1.0 - beta_s/beta_m # Biot Coefficient (-)
### Hydrostatic Condition ###
p_h = project(Expression('rho_0*g*(z1-x[2])',degree=1,rho_0=rho_0,g=g,\
z1=z1),Z)
### Solution dependent properties ###
# Total Stress
def sigma(us,p,alpha,K,G):
sigma_val = sigma_e(us,K,G) + alpha*p*Identity(len(us))
return sigma_val
# Effective Stress
def sigma_e(us,K,G):
sigma_e_val = -2.0*G*epsilon(us) - (K-2.0/3.0*G)*epsilon_v(us)*\
Identity(len(us))
return sigma_e_val
# Strain
def epsilon(us):
epsilon_val = 1.0/2.0*(grad(us)+grad(us).T)
return epsilon_val
# Volumetric Strain
def epsilon_v(us):
epsilon_v_val = div(us)
return epsilon_v_val
if Linear == 1:
# Fluid Density
def rho_f(p,p_0,p_h,rho_0,beta_f):
rho_val = Constant(rho_0)
return rho_val
# Porosity
def phi(alpha,us,p,beta_s,phi_0,phi_min,t):
phi_val = Constant(phi_0)
return phi_val
if Nonlinear == 1:
# Fluid Density
def rho_f(p,p_0,p_h,rho_0,beta_f):
rho_val = Constant(rho_0)*exp(Constant(beta_f)*(p+p_h-\
Constant(p_0)))
return rho_val
# Porosity
def phi(alpha,us,p,beta_s,phi_0,phi_min,t):
phi_val = alpha - (alpha-phi_0)*exp(-(epsilon_v(us)+beta_s*p))
phi_val = conditional(ge(phi_val,phi_min),phi_val,\
Constant(phi_min))
return phi_val
###########################################################################
######### TIME PARAMETERS ################################################
### Time parameters ###
tend = 1.0
nsteps = 10
dt = tend/nsteps
###########################################################################
######### INITIAL CONDITIONS #############################################
### Initial Condition ###
X_i = Expression(
(
"0.0", # p
"0.0","0.0","0.0", # (q1, q2, q3)
"0.0","0.0","0.0" # (us1, us2, us3)
),degree = 2)
X_n = interpolate(X_i,W)
# Initial porosity
phi_n = interpolate(Constant(phi_0),Z)
# Initial Picard solution estimate
X_m = interpolate(X_i,W)
###########################################################################
######### BOUNDARY CONDITIONS ############################################
### Boundary Conditions ###
class LeftBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[0],x0)
left_boundary = LeftBoundary()
class RightBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[0],x1)
right_boundary = RightBoundary()
class BackBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[1],y0)
back_boundary = BackBoundary()
class FrontBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[1],y1)
front_boundary = FrontBoundary()
class BottomBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[2],z0)
bottom_boundary = BottomBoundary()
class TopBoundary(SubDomain):
def inside(self, x, on_boundary):
return near(x[2],z1)
top_boundary = TopBoundary()
boundary_facet_function = MeshFunction('size_t', mesh, 2)
boundary_facet_function.set_all(0)
left_boundary.mark(boundary_facet_function,1)
right_boundary.mark(boundary_facet_function,2)
back_boundary.mark(boundary_facet_function,3)
front_boundary.mark(boundary_facet_function,4)
bottom_boundary.mark(boundary_facet_function,5)
top_boundary.mark(boundary_facet_function,6)
def GetBoundaryConditions(t):
bcs = []
# Left Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 1))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 1))
# Right Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 2))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 2))
# Back Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 3))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 3))
# Front Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 4))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 4))
# Bottom Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 5))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 5))
# Top Boundary
# # Flux Boundary (normal)
# bcs.append(DirichletBC(W.sub(1), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 6))
# # Displacement Boundary
# bcs.append(DirichletBC(W.sub(2), Constant((0.0,0.0,0.0)), \
# boundary_facet_function, 6))
return bcs
###########################################################################
######### SOLVER SET-UP ##################################################
U = TrialFunction(W)
V = TestFunction(W)
n = FacetNormal(mesh)
norm = as_vector([n[0], n[1], n[2]])
ff = Constant(0.0) # fluid source
X = Function(W)
density_save = Function(Z)
porosity_save = Function(Z)
# dx = Measure("dx")(subdomain_data=***subdomain-face-function-name***)
ds = Measure("ds")(subdomain_data=boundary_facet_function)
# Function of ones to evaluate grad(rho_f) in weak form
ones_func = project(Constant(1.0),Z,solver_type='gmres')
def WeakForm(U,V,X_n,t):
p, q, us = split(U)
Pt, Qt, Ust = split(V)
p_n, q_n, us_n = split(X_n)
p_m, q_m, us_m = split(X_m)
### Weak Forms ###
# Conservation of Mass
CoMass_l_1 = rho_f(p_m,p_0,p_h,rho_0,beta_f)*Constant(alpha)\
*epsilon_v(us)*Pt*dx
CoMass_l_2 = rho_f(p_m,p_0,p_h,rho_0,beta_f)*((Constant(alpha)\
-phi(alpha,us_m,p_m,beta_s,phi_0,phi_min,t))*Constant(beta_s)\
+ phi(alpha,us_m,p_m,beta_s,phi_0,phi_min,t)*Constant(beta_f))*p\
*Pt*dx
CoMass_l_3 = dt*rho_f(p_m,p_0,p_h,rho_0,beta_f)*div(q)*Pt*dx
CoMass_l_4 = dt*Constant(weight)*inner(q,\
grad(rho_f(p_m,p_0,p_h,rho_0,beta_f)*ones_func))*Pt*dx
CoMass_l = CoMass_l_1 + CoMass_l_2 + CoMass_l_3 + CoMass_l_4
CoMass_r_1 = dt*ff*Pt*dx
CoMass_r_2 = rho_f(p_m,p_0,p_h,rho_0,beta_f)*Constant(alpha)\
*epsilon_v(us_n)*Pt*dx
CoMass_r_3 = rho_f(p_m,p_0,p_h,rho_0,beta_f)*((Constant(alpha)\
-phi(alpha,us_m,p_m,beta_s,phi_0,phi_min,t))*Constant(beta_s) \
+ phi(alpha,us_m,p_m,beta_s,phi_0,phi_min,t)*Constant(beta_f))*p_n\
*Pt*dx
CoMass_r = CoMass_r_1 + CoMass_r_2 + CoMass_r_3
# Darcy's Law
DL_l = mu/k*inner(q,Qt)*dx - p*div(Qt)*dx
# DL_r = -Constant(***pressure***)*inner(Qt,norm)*ds(-)
# Conservation of Momentum
CoMom_l = inner(sigma(us,p,alpha,K,G),grad(Ust))*dx
# CoMom_r = inner(Constant((0.0,***loading***)),Ust)*ds(-)
A = CoMass_l + DL_l + CoMom_l
B = CoMass_r #+ DL_r + CoMom_r
return A,B
def LinearSolver(U,V,X_n,t,bcs):
a,L = WeakForm(U,V,X_n,t)
A, b = assemble_system(a,L,bcs)
solve(A, X.vector(), b, 'mumps')
return X
###########################################################################
######### OUTPUT #########################################################
pressure_file = XDMFFile('General/pressure.xdmf')
flux_file = XDMFFile('General/flux.xdmf')
disp_file = XDMFFile('General/disp.xdmf')
density_file = XDMFFile('General/density.xdmf')
porosity_file = XDMFFile('General/porosity.xdmf')
###########################################################################
######### TIME LOOP #######################################################
t = 0.0
if MPI.rank(MPI.comm_world) == 0:
print ('Starting Time Loop...')
### Time Loop ###
for n in range(nsteps):
MPI.barrier(MPI.comm_world)
t += dt
if MPI.rank(MPI.comm_world) == 0:
print( "###############")
print( "")
print( "NEW TIME STEP")
print( "Time =", t)
print( "")
print( np.round(t/tend*100.0,6),"% Complete")
### Convergence criteria ###
reltol = 1E-4 # Relative error tolerance for Picard
rel_error_max_global = 9999 # Initialize relative error
max_iter = 100 # Maximum Picard iterations
omega = 1.0 # Relaxation coefficient
iter = 0
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print ("Entering Picard Iteration:")
print ("")
# Get boundary conditions for this time
bcs = GetBoundaryConditions(t)
### Picard Iteration Loop ###
while (rel_error_max_global > reltol):
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print ("ITERATE")
iter += 1
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print ("iteration = ", iter)
print ("")
### Solve ###
X = LinearSolver(U,V,X_n,t,bcs)
if MPI.rank(MPI.comm_world) == 0:
print( "")
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print ("Solved for a new solution estimate.")
print ("")
p, q, us = X.split(True)
p_m, q_m, us_m = X_m.split(True)
p_n, q_n, us_n = X_n.split(True)
# Evaluate for convergence of pressure solution on each processor
if linear_flag == 1:
rel_error_max_local = 0.0
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print ("Evaluate for Convergence")
print ("-------------")
cell_values_p = p.vector().get_local()
cell_values_p_m = p_m.vector().get_local()
rel_error_max_local = np.nanmax(np.divide(np.abs(cell_values_p \
- cell_values_p_m),np.abs(cell_values_p_m)))
# Find the global maximum value of the relative error
rel_error_max_global = MPI.max(MPI.comm_world,rel_error_max_local)
if MPI.rank(MPI.comm_world) == 0:
print( "Relative Error = ",rel_error_max_global)
print( "-------------")
print ("")
# Update estimate
X_new = X_m + omega*(X - X_m)
X_m.assign(X_new)
if iter == max_iter:
if MPI.rank(MPI.comm_world) == 0:
print( "Maximum iterations met")
print( "Solution doesn't converge")
quit()
if linear_flag == 0:
if MPI.rank(MPI.comm_world) == 0:
print( "The solution has converged.")
print ("Total iterations = ", iter)
print ("")
if MPI.rank(MPI.comm_world) == 0:
print( "Saving solutions.")
print ("")
pressure_file.write(p,t)
flux_file.write(q,t)
disp_file.write(us,t)
density_save.assign(project(rho_f(p,p_0,p_h,rho_0,beta_f),P))
porosity_save.assign(project(phi(alpha,us,p,beta_s,phi_0,phi_min,t),P))
density_file.write(density_save,t)
porosity_file.write(porosity_save,t)
# Update solution at last time step
X_n.assign(X)
phi_n.assign(project(phi(alpha,us,p,beta_s,phi_0,phi_min,t),P))
if MPI.rank(MPI.comm_world) == 0:
print ("Just updated last solution.")
print ("")
print( "###############")
print( "")
###########################################################################
if MPI.rank(MPI.comm_world) == 0:
print( "This code finished at")
print( strftime("%Y-%m-%d %H:%M:%S", gmtime()))
|
[
"time.gmtime",
"numpy.abs",
"numpy.round"
] |
[((1211, 1219), 'time.gmtime', 'gmtime', ([], {}), '()\n', (1217, 1219), False, 'from time import gmtime, strftime\n'), ((12883, 12912), 'numpy.round', 'np.round', (['(t / tend * 100.0)', '(6)'], {}), '(t / tend * 100.0, 6)\n', (12891, 12912), True, 'import numpy as np\n'), ((16374, 16382), 'time.gmtime', 'gmtime', ([], {}), '()\n', (16380, 16382), False, 'from time import gmtime, strftime\n'), ((14646, 14685), 'numpy.abs', 'np.abs', (['(cell_values_p - cell_values_p_m)'], {}), '(cell_values_p - cell_values_p_m)\n', (14652, 14685), True, 'import numpy as np\n'), ((14704, 14727), 'numpy.abs', 'np.abs', (['cell_values_p_m'], {}), '(cell_values_p_m)\n', (14710, 14727), True, 'import numpy as np\n')]
|
"""
General purpose math functions, mostly geometric in nature.
"""
import math
import numpy as np
from numpy.linalg import norm
from scipy.linalg import svd
from aspire.utils.random import Random
def cart2pol(x, y):
"""
Convert Cartesian to Polar Coordinates. All input arguments must be the same shape.
:param x: x-coordinate in Cartesian space
:param y: y-coordinate in Cartesian space
:return: A 2-tuple of values:
theta: angular coordinate/azimuth
r: radial distance from origin
"""
return np.arctan2(y, x), np.hypot(x, y)
def cart2sph(x, y, z):
"""
Transform cartesian coordinates to spherical. All input arguments must be the same shape.
:param x: X-values of input co-ordinates.
:param y: Y-values of input co-ordinates.
:param z: Z-values of input co-ordinates.
:return: A 3-tuple of values, all of the same shape as the inputs.
(<azimuth>, <elevation>, <radius>)
azimuth and elevation are returned in radians.
This function is equivalent to MATLAB's cart2sph function.
"""
hxy = np.hypot(x, y)
r = np.hypot(hxy, z)
el = np.arctan2(z, hxy)
az = np.arctan2(y, x)
return az, el, r
def _mgrid_slice(n, shifted, normalized):
"""
Util to generate a `slice` representing a 1d linspace
as expected by `np.mgrid`.
:param shifted: shifted by half of grid or not when n is even.
:param normalized: normalize the grid in the range of (-1, 1) or not.
:return: `slice` to be used by `np.mgrid`.
"""
num_points = n * 1j
start = -n // 2 + 1
end = n // 2
if shifted and n % 2 == 0:
start -= 1 / 2
end -= 1 / 2
elif n % 2 == 0:
start -= 1
end -= 1
if normalized:
# Compute the denominator for normalization
denom = n / 2
if shifted and n % 2 == 0:
denom -= 1 / 2
# Apply the normalization
start /= denom
end /= denom
return slice(start, end, num_points)
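# For example (added note), _mgrid_slice(5, shifted=False, normalized=True)
# returns slice(-0.8, 0.8, 5j); np.mgrid expands the imaginary step count into
# 5 evenly spaced points: [-0.8, -0.4, 0.0, 0.4, 0.8].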
def grid_1d(n, shifted=False, normalized=True, dtype=np.float32):
"""
Generate one dimensional grid.
:param n: the number of grid points.
:param shifted: shifted by half of grid or not when n is even.
:param normalized: normalize the grid in the range of (-1, 1) or not.
:return: the rectangular and polar coordinates of all grid points.
"""
r = x = np.mgrid[_mgrid_slice(n, shifted, normalized)].astype(dtype)
return {"x": x, "r": r}
def grid_2d(n, shifted=False, normalized=True, indexing="yx", dtype=np.float32):
"""
Generate two dimensional grid.
:param n: the number of grid points in each dimension.
:param shifted: shifted by half of grid or not when n is even.
:param normalized: normalize the grid in the range of (-1, 1) or not.
:param indexing: 'yx' (C) or 'xy' (F), defaulting to 'yx'.
See https://numpy.org/doc/stable/reference/generated/numpy.meshgrid.html
:return: the rectangular and polar coordinates of all grid points.
"""
grid = _mgrid_slice(n, shifted, normalized)
y, x = np.mgrid[grid, grid].astype(dtype)
if indexing == "xy":
x, y = y, x
elif indexing != "yx":
raise RuntimeError(
f"grid_2d indexing {indexing} not supported." " Try 'xy' or 'yx'"
)
phi, r = cart2pol(x, y)
return {"x": x, "y": y, "phi": phi, "r": r}
def grid_3d(n, shifted=False, normalized=True, indexing="zyx", dtype=np.float32):
"""
Generate three dimensional grid.
:param n: the number of grid points in each dimension.
:param shifted: shifted by half of grid or not when n is even.
:param normalized: normalize the grid in the range of (-1, 1) or not.
:param indexing: 'zyx' (C) or 'xyz' (F), defaulting to 'zyx'.
See https://numpy.org/doc/stable/reference/generated/numpy.meshgrid.html
:return: the rectangular and spherical coordinates of all grid points.
"""
grid = _mgrid_slice(n, shifted, normalized)
z, y, x = np.mgrid[grid, grid, grid].astype(dtype)
if indexing == "xyz":
x, y, z = z, y, x
elif indexing != "zyx":
raise RuntimeError(
f"grid_3d indexing {indexing} not supported." " Try 'xyz' or 'zyx'"
)
phi, theta, r = cart2sph(x, y, z)
# TODO: Should this theta adjustment be moved inside cart2sph?
theta = np.pi / 2 - theta
return {"x": x, "y": y, "z": z, "phi": phi, "theta": theta, "r": r}
def uniform_random_angles(n, seed=None, dtype=np.float32):
"""
Generate random 3D rotation angles
:param n: The number of rotation angles to generate
:param seed: Random integer seed to use. If None, the current random state is used.
:return: A n-by-3 ndarray of rotation angles
"""
# Generate random rotation angles, in radians
with Random(seed):
angles = np.column_stack(
(
np.random.random(n) * 2 * np.pi,
np.arccos(2 * np.random.random(n) - 1),
np.random.random(n) * 2 * np.pi,
)
)
return angles.astype(dtype)
def register_rotations(rots, rots_ref):
"""
Register estimated orientations to reference ones.
Finds the orthogonal transformation that best aligns the estimated rotations
to the reference rotations.
:param rots: The rotations to be aligned in the form of a n-by-3-by-3 array.
:param rots_ref: The reference rotations to which we would like to align in
the form of a n-by-3-by-3 array.
:return: o_mat, optimal orthogonal 3x3 matrix to align the two sets;
flag, flag==1 then J conjugacy is required and 0 is not.
"""
assert (
rots.shape == rots_ref.shape
), "Two sets of rotations must have same dimensions."
K = rots.shape[0]
# Reflection matrix
J = np.array([[1, 0, 0], [0, 1, 0], [0, 0, -1]])
Q1 = np.zeros((3, 3), dtype=rots.dtype)
Q2 = np.zeros((3, 3), dtype=rots.dtype)
for k in range(K):
R = rots[k, :, :]
Rref = rots_ref[k, :, :]
Q1 = Q1 + R @ Rref.T
Q2 = Q2 + (J @ R @ J) @ Rref.T
# Compute the two possible orthogonal matrices which register the
# estimated rotations to the true ones.
Q1 = Q1 / K
Q2 = Q2 / K
# We are registering one set of rotations (the estimated ones) to
# another set of rotations (the true ones). Thus, the transformation
# matrix between the two sets of rotations should be orthogonal. This
# matrix is either Q1 if we recover the non-reflected solution, or Q2,
# if we got the reflected one. In any case, one of them should be
# orthogonal.
err1 = norm(Q1 @ Q1.T - np.eye(3, dtype=rots.dtype), ord="fro")
err2 = norm(Q2 @ Q2.T - np.eye(3, dtype=rots.dtype), ord="fro")
# In any case, enforce the registering matrix O to be a rotation.
if err1 < err2:
# Use Q1 as the registering matrix
U, _, V = svd(Q1)
flag = 0
else:
# Use Q2 as the registering matrix
U, _, V = svd(Q2)
flag = 1
Q_mat = U @ V
return Q_mat, flag
def get_aligned_rotations(rots, Q_mat, flag):
"""
Get aligned rotation matrices to reference ones.
Calculated aligned rotation matrices from the orthogonal transformation
that best aligns the estimated rotations to the reference rotations.
:param rots: The reference rotations to which we would like to align in
the form of a n-by-3-by-3 array.
:param Q_mat: optimal orthogonal 3x3 transformation matrix
:param flag: flag==1 then J conjugacy is required and 0 is not
:return: regrot, aligned rotation matrices
"""
K = rots.shape[0]
# Reflection matrix
J = np.array([[1, 0, 0], [0, 1, 0], [0, 0, -1]])
regrot = np.zeros_like(rots)
for k in range(K):
R = rots[k, :, :]
if flag == 1:
R = J @ R @ J
regrot[k, :, :] = Q_mat.T @ R
return regrot
def get_rots_mse(rots_reg, rots_ref):
"""
Calculate MSE between the estimated orientations to reference ones.
:param rots_reg: The estimated rotations after alignment in the form of
a n-by-3-by-3 array.
:param rots_ref: The reference rotations.
:return: The MSE value between two sets of rotations.
"""
assert (
rots_reg.shape == rots_ref.shape
), "Two sets of rotations must have same dimensions."
K = rots_reg.shape[0]
diff = np.zeros(K)
mse = 0
for k in range(K):
diff[k] = norm(rots_reg[k, :, :] - rots_ref[k, :, :], ord="fro")
mse += diff[k] ** 2
mse = mse / K
return mse
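# Typical use of the three helpers above (added sketch): `rots_est` and
# `rots_ref` are assumed to be n-by-3-by-3 stacks of rotation matrices
# produced elsewhere in the pipeline.
#
#   Q_mat, flag = register_rotations(rots_est, rots_ref)
#   rots_aligned = get_aligned_rotations(rots_est, Q_mat, flag)
#   mse = get_rots_mse(rots_aligned, rots_ref)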
def common_line_from_rots(r1, r2, ell):
"""
Compute the common line induced by rotation matrices r1 and r2.
:param r1: The first rotation matrix of 3-by-3 array.
:param r2: The second rotation matrix of 3-by-3 array.
:param ell: The total number of common lines.
:return: The common line indices for both first and second rotations.
"""
assert r1.dtype == r2.dtype, "Ambiguous dtypes"
ut = np.dot(r2, r1.T)
alpha_ij = np.arctan2(ut[2, 0], -ut[2, 1]) + np.pi
alpha_ji = np.arctan2(ut[0, 2], -ut[1, 2]) + np.pi
ell_ij = alpha_ij * ell / (2 * np.pi)
ell_ji = alpha_ji * ell / (2 * np.pi)
ell_ij = int(np.mod(np.round(ell_ij), ell))
ell_ji = int(np.mod(np.round(ell_ji), ell))
return ell_ij, ell_ji
def crop_pad_2d(im, size, fill_value=0):
"""
:param im: A 2-dimensional numpy array
    :param size: Integer size of cropped/padded output
    :param fill_value: Value used to fill the padded region (default 0)
    :return: A numpy array of shape (size, size)
"""
im_y, im_x = im.shape
# shift terms
start_x = math.floor(im_x / 2) - math.floor(size / 2)
start_y = math.floor(im_y / 2) - math.floor(size / 2)
# cropping
if size <= min(im_y, im_x):
return im[start_y : start_y + size, start_x : start_x + size]
# padding
elif size >= max(im_y, im_x):
# ensure that we return in the same dtype as the input
to_return = fill_value * np.ones((size, size), dtype=im.dtype)
# when padding, start_x and start_y are negative since size is larger
# than im_x and im_y; the below line calculates where the original image
# is placed in relation to the (now-larger) box size
to_return[-start_y : im_y - start_y, -start_x : im_x - start_x] = im
return to_return
else:
        # target size is between im_x and im_y
raise ValueError("Cannot crop and pad an image at the same time.")
|
[
"numpy.zeros_like",
"numpy.arctan2",
"numpy.eye",
"aspire.utils.random.Random",
"numpy.zeros",
"math.floor",
"numpy.ones",
"numpy.hypot",
"scipy.linalg.svd",
"numpy.random.random",
"numpy.array",
"numpy.linalg.norm",
"numpy.dot",
"numpy.round"
] |
[((1090, 1104), 'numpy.hypot', 'np.hypot', (['x', 'y'], {}), '(x, y)\n', (1098, 1104), True, 'import numpy as np\n'), ((1113, 1129), 'numpy.hypot', 'np.hypot', (['hxy', 'z'], {}), '(hxy, z)\n', (1121, 1129), True, 'import numpy as np\n'), ((1139, 1157), 'numpy.arctan2', 'np.arctan2', (['z', 'hxy'], {}), '(z, hxy)\n', (1149, 1157), True, 'import numpy as np\n'), ((1167, 1183), 'numpy.arctan2', 'np.arctan2', (['y', 'x'], {}), '(y, x)\n', (1177, 1183), True, 'import numpy as np\n'), ((5849, 5893), 'numpy.array', 'np.array', (['[[1, 0, 0], [0, 1, 0], [0, 0, -1]]'], {}), '([[1, 0, 0], [0, 1, 0], [0, 0, -1]])\n', (5857, 5893), True, 'import numpy as np\n'), ((5904, 5938), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {'dtype': 'rots.dtype'}), '((3, 3), dtype=rots.dtype)\n', (5912, 5938), True, 'import numpy as np\n'), ((5948, 5982), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {'dtype': 'rots.dtype'}), '((3, 3), dtype=rots.dtype)\n', (5956, 5982), True, 'import numpy as np\n'), ((7736, 7780), 'numpy.array', 'np.array', (['[[1, 0, 0], [0, 1, 0], [0, 0, -1]]'], {}), '([[1, 0, 0], [0, 1, 0], [0, 0, -1]])\n', (7744, 7780), True, 'import numpy as np\n'), ((7795, 7814), 'numpy.zeros_like', 'np.zeros_like', (['rots'], {}), '(rots)\n', (7808, 7814), True, 'import numpy as np\n'), ((8457, 8468), 'numpy.zeros', 'np.zeros', (['K'], {}), '(K)\n', (8465, 8468), True, 'import numpy as np\n'), ((9069, 9085), 'numpy.dot', 'np.dot', (['r2', 'r1.T'], {}), '(r2, r1.T)\n', (9075, 9085), True, 'import numpy as np\n'), ((544, 560), 'numpy.arctan2', 'np.arctan2', (['y', 'x'], {}), '(y, x)\n', (554, 560), True, 'import numpy as np\n'), ((562, 576), 'numpy.hypot', 'np.hypot', (['x', 'y'], {}), '(x, y)\n', (570, 576), True, 'import numpy as np\n'), ((4841, 4853), 'aspire.utils.random.Random', 'Random', (['seed'], {}), '(seed)\n', (4847, 4853), False, 'from aspire.utils.random import Random\n'), ((6951, 6958), 'scipy.linalg.svd', 'svd', (['Q1'], {}), '(Q1)\n', (6954, 6958), False, 'from scipy.linalg import svd\n'), ((7047, 7054), 'scipy.linalg.svd', 'svd', (['Q2'], {}), '(Q2)\n', (7050, 7054), False, 'from scipy.linalg import svd\n'), ((8522, 8576), 'numpy.linalg.norm', 'norm', (['(rots_reg[k, :, :] - rots_ref[k, :, :])'], {'ord': '"""fro"""'}), "(rots_reg[k, :, :] - rots_ref[k, :, :], ord='fro')\n", (8526, 8576), False, 'from numpy.linalg import norm\n'), ((9101, 9132), 'numpy.arctan2', 'np.arctan2', (['ut[2, 0]', '(-ut[2, 1])'], {}), '(ut[2, 0], -ut[2, 1])\n', (9111, 9132), True, 'import numpy as np\n'), ((9156, 9187), 'numpy.arctan2', 'np.arctan2', (['ut[0, 2]', '(-ut[1, 2])'], {}), '(ut[0, 2], -ut[1, 2])\n', (9166, 9187), True, 'import numpy as np\n'), ((9670, 9690), 'math.floor', 'math.floor', (['(im_x / 2)'], {}), '(im_x / 2)\n', (9680, 9690), False, 'import math\n'), ((9693, 9713), 'math.floor', 'math.floor', (['(size / 2)'], {}), '(size / 2)\n', (9703, 9713), False, 'import math\n'), ((9728, 9748), 'math.floor', 'math.floor', (['(im_y / 2)'], {}), '(im_y / 2)\n', (9738, 9748), False, 'import math\n'), ((9751, 9771), 'math.floor', 'math.floor', (['(size / 2)'], {}), '(size / 2)\n', (9761, 9771), False, 'import math\n'), ((6691, 6718), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'rots.dtype'}), '(3, dtype=rots.dtype)\n', (6697, 6718), True, 'import numpy as np\n'), ((6759, 6786), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'rots.dtype'}), '(3, dtype=rots.dtype)\n', (6765, 6786), True, 'import numpy as np\n'), ((9306, 9322), 'numpy.round', 'np.round', (['ell_ij'], {}), '(ell_ij)\n', (9314, 9322), True, 'import numpy as np\n'), 
((9354, 9370), 'numpy.round', 'np.round', (['ell_ji'], {}), '(ell_ji)\n', (9362, 9370), True, 'import numpy as np\n'), ((10034, 10071), 'numpy.ones', 'np.ones', (['(size, size)'], {'dtype': 'im.dtype'}), '((size, size), dtype=im.dtype)\n', (10041, 10071), True, 'import numpy as np\n'), ((4919, 4938), 'numpy.random.random', 'np.random.random', (['n'], {}), '(n)\n', (4935, 4938), True, 'import numpy as np\n'), ((5024, 5043), 'numpy.random.random', 'np.random.random', (['n'], {}), '(n)\n', (5040, 5043), True, 'import numpy as np\n'), ((4982, 5001), 'numpy.random.random', 'np.random.random', (['n'], {}), '(n)\n', (4998, 5001), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# =============================================================================
# Variational boosting (BMC) example on the 2D likelihood provided by
# source_2d_likelihood_fn (x0, y0 are inferred; the other parameters are fixed)
# =============================================================================
import sys
sys.path.insert(0,"../../src")
import math
import functools
import time
import os
import torch
import numpy as np
from scipy.special import gamma
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from src.variational_boosting_bmc import VariationalBoosting
from src import vb_utils
from src import sampling
from src.utils import TicToc
from source_2d_likelihood_fn import compute_log_likelihood
np.random.seed(100)
torch.manual_seed(100)
tictoc = TicToc()
#%% priors
#x0 : Unif(0,1)
#Ts : Unif(0,0.5)
lambd_rho = 20.0 #rho : Exp(20)
lambd_q = 0.2 #q : Exp(0.2)
#%%%
def logit_t(x,a=0,b=1):
return torch.log(((x-a)/(b-a))/(1.0-(x-a)/(b-a)))
def sigmoid(x,a=0,b=1):
return (b-a)*1.0/(1.0+np.exp(-x)) + a
def dsigmoid(x,a=0,b=1):
return (b-a)*np.exp(x)/((1+np.exp(x))**2)
def exp(x):
return np.exp(x)
def dexp(x):
return np.exp(x)
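# --- Added illustration (not part of the original script) ---------------------
# sigmoid() is meant to invert logit_t() on the interval (a, b); a quick sanity
# check with a few hypothetical probe points in (0, 1):
_probe = torch.tensor([0.25, 0.5, 0.75])
assert np.allclose(sigmoid(logit_t(_probe).numpy()), _probe.numpy(), atol=1e-06)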
def unwarped_logjoint_np(x0,y0,Ts,q0,rho):
rho = 0.05
Ts = 0.3
q0 = 6.366197723675814
ll = compute_log_likelihood(x0,y0,rho,q0,Ts)
# ll += -np.log(1+(q0/10.0)**2)
# ll += -np.log(1+(rho/0.1)**2)
return ll
def logjoint_np(x):
# x0,y0,Ts,q0,rho = x[0],x[1],x[2],x[3],x[4]
x0,y0,Ts,q0,rho = x[0],x[1],0.0,0.0,0.0
ll = unwarped_logjoint_np(sigmoid(x0),sigmoid(y0),
sigmoid(Ts,b=0.4),
exp(q0),exp(rho)) + \
np.log(dsigmoid(x0)) + np.log(dsigmoid(y0))
# np.log(dsigmoid(Ts,b=0.4)) + \
# np.log(dexp(q0)) + np.log(dexp(rho))
return ll
def logjoint(sample):
return torch.tensor(logjoint_np(sample.flatten().numpy()))
#%%
#torch.randn(20)
dim = 2
nsamples = 20*dim
def sampling1(nsamples):
X1 = logit_t(torch.rand(nsamples,1))
X2 = logit_t(torch.rand(nsamples,1))
# X3 = logit_t(torch.rand(nsamples,1)*0.4,b=0.4)
# X4 = torch.log(torch.distributions.HalfCauchy(scale=10.0).rsample((nsamples,1)))
# X5 = torch.log(torch.distributions.HalfCauchy(scale=0.1).rsample((nsamples,1)))
sample = torch.cat([X1,X2],dim=1)
return sample
samples = sampling1(nsamples)
mu0 = torch.zeros(dim)
cov0 = 20.0*torch.ones(dim)
#%%
#samples = vb.samples
training_interval = 20
acquisitions = ["prospective","mmlt"]
vb = VariationalBoosting(dim,logjoint,samples,mu0,cov0,
bmc_type="FM",normalization_mode="normalize",
training_space="gspace",noise=1e-4,
kernel_function="PMat",matern_coef=1.5,
numstab=-50.0,degree_hermite=50)
vb.optimize_bmc_model(maxiter=500,verbose=1,
lr=0.05)
#%%
elbo_list = [vb.evidence_lower_bound(nsamples=10000).cpu().numpy()]
kl_list = [vb.kullback_proposal_bmc(10000).item()]
step_list = [0]
time_list = [0.0]
#%%
print("Active sampling...")
for i in range(10*dim):
vb.update_bmcmodel(acquisition="mmlt",mode="optimizing",vreg=1e-2)
vb.update_full()
folder_name = "testheat1c"
try:
os.mkdir(folder_name)
except FileExistsError:
pass
vb.save_distribution("%s/mvn%i"%(folder_name,0))
#%%
print(torch.sigmoid(vb.currentq_mean))
print(elbo_list[-1])
print(kl_list[-1])
#%%
for i in range(100):
tictoc.tic()
_ = vb.update(maxiter_nc=300,lr_nc=0.1,b_sn=0.1,
n_samples_nc=500,n_samples_sn=300,n_iter_sn=300,
max_alpha=1.0,verbose=0)
try:
acquisition = np.random.choice(acquisitions)
vb.update_bmcmodel(acquisition=acquisition,mode="optimizing",
acq_reg=1e-1,verbose=0)
except:
continue
if ((i+1)%training_interval) == 0:
vb.update_full()
elapsed = tictoc.toc(printing=False)
elbo_list.append(vb.evidence_lower_bound(nsamples=10000).cpu().numpy())
kl_list.append(vb.kullback_proposal_bmc(10000).item())
step_list.append(i+1)
time_list.append(elapsed)
print(torch.sigmoid(vb.currentq_mean))
print(acquisition)
print(elbo_list[-1])
print(kl_list[-1])
print(time_list[-1])
print("Step %i"%(i+1))
vb.save_distribution("%s/mvn%i"%(folder_name,i+1))
elbo_np = np.array(elbo_list).astype(float)
step_list_np = np.array(step_list).astype(int)
times_np = np.array(time_list)
np.savez("%s/tracking"%folder_name,
time=times_np,
elbo=elbo_np,
steps=step_list_np)
#%%
Nplot = 41
x,y = torch.linspace(-10,10,Nplot),torch.linspace(-10,10,Nplot)
X,Y = torch.meshgrid(x,y)
XY_ = torch.stack([X,Y],dim=-1).reshape(-1,2)
Zgp_ = vb.scaled_prediction(XY_)
Zgp = Zgp_.reshape(*X.shape)
Zq_ = vb.current_logq(XY_)
Zq = Zq_.reshape(*X.shape)
fig1 = plt.figure()
ax1 = fig1.add_subplot(111, projection='3d')
ax1.plot_surface(X.numpy(),Y.numpy(),Zgp.numpy())
plt.show()
fig2 = plt.figure()
ax2 = fig2.add_subplot(111, projection='3d')
ax2.plot_surface(X.numpy(),Y.numpy(),Zq.numpy())
plt.show()
#%%
Nplot = 41
x = torch.linspace(-5,5,Nplot).reshape(-1,1)
xy_ = torch.cat([x,1.0986122886681098*torch.ones_like(x)],dim=-1)
zgp = vb.scaled_prediction(xy_)
zvb = vb.current_logq(xy_)
fig,ax = plt.subplots()
plt.plot(x.numpy(),zgp.numpy())
fig,ax = plt.subplots()
plt.plot(x.numpy(),zvb.detach().numpy())
#%%
Nplot = 21
x,y = torch.linspace(-10,10,Nplot),torch.linspace(-10,10,Nplot)
X,Y = torch.meshgrid(x,y)
XY_ = torch.stack([X,Y],dim=-1).reshape(-1,2)
Ztrue_ = np.array([logjoint_np(xy) for xy in XY_.numpy()]).reshape(-1,1)
Ztrue = Ztrue_.reshape(*X.shape)
fig3 = plt.figure()
ax3 = fig3.add_subplot(111, projection='3d')
ax3.plot_surface(X.numpy(),Y.numpy(),Ztrue)
plt.show()
#Zgp_ = vb.scaled_prediction(XY_)
#Zgp = Zgp_.reshape(*X.shape)
#Zq_ = vb.current_logq(XY_)
#Zq = Zq_.reshape(*X.shape)
#
#fig1 = plt.figure()
#ax1 = fig1.add_subplot(111, projection='3d')
#ax1.plot_surface(X.numpy(),Y.numpy(),Zgp.numpy())
#plt.show()
#
#fig2 = plt.figure()
#ax2 = fig2.add_subplot(111, projection='3d')
#ax2.plot_surface(X.numpy(),Y.numpy(),Zq.numpy())
#plt.show()
|
[
"os.mkdir",
"numpy.random.seed",
"torch.cat",
"matplotlib.pyplot.figure",
"numpy.exp",
"src.utils.TicToc",
"torch.ones",
"src.variational_boosting_bmc.VariationalBoosting",
"numpy.random.choice",
"torch.zeros",
"matplotlib.pyplot.subplots",
"torch.log",
"matplotlib.pyplot.show",
"torch.manual_seed",
"torch.rand",
"numpy.savez",
"torch.ones_like",
"torch.stack",
"source_2d_likelihood_fn.compute_log_likelihood",
"sys.path.insert",
"torch.meshgrid",
"torch.sigmoid",
"numpy.array",
"torch.linspace"
] |
[((346, 377), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../src"""'], {}), "(0, '../../src')\n", (361, 377), False, 'import sys\n'), ((766, 785), 'numpy.random.seed', 'np.random.seed', (['(100)'], {}), '(100)\n', (780, 785), True, 'import numpy as np\n'), ((786, 808), 'torch.manual_seed', 'torch.manual_seed', (['(100)'], {}), '(100)\n', (803, 808), False, 'import torch\n'), ((818, 826), 'src.utils.TicToc', 'TicToc', ([], {}), '()\n', (824, 826), False, 'from src.utils import TicToc\n'), ((2431, 2447), 'torch.zeros', 'torch.zeros', (['dim'], {}), '(dim)\n', (2442, 2447), False, 'import torch\n'), ((2569, 2792), 'src.variational_boosting_bmc.VariationalBoosting', 'VariationalBoosting', (['dim', 'logjoint', 'samples', 'mu0', 'cov0'], {'bmc_type': '"""FM"""', 'normalization_mode': '"""normalize"""', 'training_space': '"""gspace"""', 'noise': '(0.0001)', 'kernel_function': '"""PMat"""', 'matern_coef': '(1.5)', 'numstab': '(-50.0)', 'degree_hermite': '(50)'}), "(dim, logjoint, samples, mu0, cov0, bmc_type='FM',\n normalization_mode='normalize', training_space='gspace', noise=0.0001,\n kernel_function='PMat', matern_coef=1.5, numstab=-50.0, degree_hermite=50)\n", (2588, 2792), False, 'from src.variational_boosting_bmc import VariationalBoosting\n'), ((4517, 4536), 'numpy.array', 'np.array', (['time_list'], {}), '(time_list)\n', (4525, 4536), True, 'import numpy as np\n'), ((4537, 4628), 'numpy.savez', 'np.savez', (["('%s/tracking' % folder_name)"], {'time': 'times_np', 'elbo': 'elbo_np', 'steps': 'step_list_np'}), "('%s/tracking' % folder_name, time=times_np, elbo=elbo_np, steps=\n step_list_np)\n", (4545, 4628), True, 'import numpy as np\n'), ((4734, 4754), 'torch.meshgrid', 'torch.meshgrid', (['x', 'y'], {}), '(x, y)\n', (4748, 4754), False, 'import torch\n'), ((4924, 4936), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4934, 4936), True, 'import matplotlib.pyplot as plt\n'), ((5032, 5042), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5040, 5042), True, 'import matplotlib.pyplot as plt\n'), ((5051, 5063), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5061, 5063), True, 'import matplotlib.pyplot as plt\n'), ((5158, 5168), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5166, 5168), True, 'import matplotlib.pyplot as plt\n'), ((5363, 5377), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5375, 5377), True, 'import matplotlib.pyplot as plt\n'), ((5419, 5433), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5431, 5433), True, 'import matplotlib.pyplot as plt\n'), ((5560, 5580), 'torch.meshgrid', 'torch.meshgrid', (['x', 'y'], {}), '(x, y)\n', (5574, 5580), False, 'import torch\n'), ((5739, 5751), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5749, 5751), True, 'import matplotlib.pyplot as plt\n'), ((5841, 5851), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5849, 5851), True, 'import matplotlib.pyplot as plt\n'), ((972, 1028), 'torch.log', 'torch.log', (['((x - a) / (b - a) / (1.0 - (x - a) / (b - a)))'], {}), '((x - a) / (b - a) / (1.0 - (x - a) / (b - a)))\n', (981, 1028), False, 'import torch\n'), ((1175, 1184), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (1181, 1184), True, 'import numpy as np\n'), ((1209, 1218), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (1215, 1218), True, 'import numpy as np\n'), ((1327, 1370), 'source_2d_likelihood_fn.compute_log_likelihood', 'compute_log_likelihood', (['x0', 'y0', 'rho', 'q0', 'Ts'], {}), '(x0, y0, rho, q0, Ts)\n', (1349, 1370), 
False, 'from source_2d_likelihood_fn import compute_log_likelihood\n'), ((2352, 2378), 'torch.cat', 'torch.cat', (['[X1, X2]'], {'dim': '(1)'}), '([X1, X2], dim=1)\n', (2361, 2378), False, 'import torch\n'), ((2460, 2475), 'torch.ones', 'torch.ones', (['dim'], {}), '(dim)\n', (2470, 2475), False, 'import torch\n'), ((3293, 3314), 'os.mkdir', 'os.mkdir', (['folder_name'], {}), '(folder_name)\n', (3301, 3314), False, 'import os\n'), ((3407, 3438), 'torch.sigmoid', 'torch.sigmoid', (['vb.currentq_mean'], {}), '(vb.currentq_mean)\n', (3420, 3438), False, 'import torch\n'), ((4670, 4700), 'torch.linspace', 'torch.linspace', (['(-10)', '(10)', 'Nplot'], {}), '(-10, 10, Nplot)\n', (4684, 4700), False, 'import torch\n'), ((4699, 4729), 'torch.linspace', 'torch.linspace', (['(-10)', '(10)', 'Nplot'], {}), '(-10, 10, Nplot)\n', (4713, 4729), False, 'import torch\n'), ((5496, 5526), 'torch.linspace', 'torch.linspace', (['(-10)', '(10)', 'Nplot'], {}), '(-10, 10, Nplot)\n', (5510, 5526), False, 'import torch\n'), ((5525, 5555), 'torch.linspace', 'torch.linspace', (['(-10)', '(10)', 'Nplot'], {}), '(-10, 10, Nplot)\n', (5539, 5555), False, 'import torch\n'), ((2051, 2074), 'torch.rand', 'torch.rand', (['nsamples', '(1)'], {}), '(nsamples, 1)\n', (2061, 2074), False, 'import torch\n'), ((2092, 2115), 'torch.rand', 'torch.rand', (['nsamples', '(1)'], {}), '(nsamples, 1)\n', (2102, 2115), False, 'import torch\n'), ((3716, 3746), 'numpy.random.choice', 'np.random.choice', (['acquisitions'], {}), '(acquisitions)\n', (3732, 3746), True, 'import numpy as np\n'), ((4203, 4234), 'torch.sigmoid', 'torch.sigmoid', (['vb.currentq_mean'], {}), '(vb.currentq_mean)\n', (4216, 4234), False, 'import torch\n'), ((4425, 4444), 'numpy.array', 'np.array', (['elbo_list'], {}), '(elbo_list)\n', (4433, 4444), True, 'import numpy as np\n'), ((4474, 4493), 'numpy.array', 'np.array', (['step_list'], {}), '(step_list)\n', (4482, 4493), True, 'import numpy as np\n'), ((4760, 4787), 'torch.stack', 'torch.stack', (['[X, Y]'], {'dim': '(-1)'}), '([X, Y], dim=-1)\n', (4771, 4787), False, 'import torch\n'), ((5188, 5216), 'torch.linspace', 'torch.linspace', (['(-5)', '(5)', 'Nplot'], {}), '(-5, 5, Nplot)\n', (5202, 5216), False, 'import torch\n'), ((5586, 5613), 'torch.stack', 'torch.stack', (['[X, Y]'], {'dim': '(-1)'}), '([X, Y], dim=-1)\n', (5597, 5613), False, 'import torch\n'), ((1123, 1132), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (1129, 1132), True, 'import numpy as np\n'), ((5267, 5285), 'torch.ones_like', 'torch.ones_like', (['x'], {}), '(x)\n', (5282, 5285), False, 'import torch\n'), ((1065, 1075), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (1071, 1075), True, 'import numpy as np\n'), ((1137, 1146), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (1143, 1146), True, 'import numpy as np\n')]
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import copy
# models and dataset import
from sklearn import tree, svm
from sklearn.neural_network import MLPClassifier
from sklearn.multiclass import OneVsRestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics import cohen_kappa_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import KBinsDiscretizer
from yellowbrick.cluster import KElbowVisualizer
# GS and our code import
from growingspheres import counterfactuals as cf
from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances
from generate_dataset import generate_dataset
from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation
from anchors import utils, anchor_tabular, anchor_base
from lime.lime_text import LimeTextExplainer
from anchors import limes
import pyfolding as pf
import baseGraph
import spacy
import os
import ape_tabular
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestNeighbors
PATH = ''
def get_farthest_distance(train_data, target_instance, ape):
farthest_distance = 0
for training_instance in train_data:
        # get_distances is similar to pairwise_distances (i.e. it gives the same result
        # for the euclidean distance), but it adds a sparsity distance computation
        # (i.e. the number of identical feature values)
#farthest_distance_now = get_distances(training_instance, instance, categorical_features=self.categorical_features)["euclidean"]
farthest_distance_now = distances(training_instance, target_instance, ape)
if farthest_distance_now > farthest_distance:
farthest_distance = farthest_distance_now
return farthest_distance
def convert_raw_data(obs, dataset_name):
# Convert a vectorize sentence into a raw text data
if dataset_name == "multilabel": obs = obs.reshape(1, -1)
return obs
def compute_distance(point, clf):
# Compute the distance from a decision frontier for a point
try:
distance = clf.predict_proba([point])
except AttributeError:
distance = clf.decision_function([point])
return distance
def instance_around_multiple_classes(smallest_zero, largest_zero, smallest_two, largest_two):
    # Return True if the mean of the smallest and largest distance from one class is contained
    # in the range spanned by the smallest and largest distance of a different class
return smallest_zero < ((largest_two + smallest_two)/2) < largest_zero or smallest_two < ((largest_zero + smallest_zero)/2) < largest_two
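# --- Added illustration (not part of the original code; numbers are made up) --
# With class-0 distances spanning [1.0, 5.0] and class-2 distances spanning
# [2.0, 8.0], the class-0 midpoint (1.0 + 5.0) / 2 = 3.0 falls inside (2.0, 8.0),
# so the target instance is considered to lie between the two classes.
assert instance_around_multiple_classes(1.0, 5.0, 2.0, 8.0)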
def get_points_from_class(classe, x_data, clf, dataset=None):
# Return each points from X that belongs to the class classe following the prediction of clf
try:
return x_data[np.where(clf.predict(x_data) == classe)], np.where(clf.predict(x_data) == classe)[0]
except:
return x_data[np.where(clf.predict(x_data)[:, classe])], np.where(clf.predict(x_data) == classe)[0]
def get_growing_sphere_from_class(opponent_class, growing_sphere_enemies, raw_data, clf,
continuous_features, categorical_features, categorical_values, obs, ape):
growing_sphere = cf.CounterfactualExplanation(raw_data, clf.predict, method='GF', target_class=opponent_class,
continuous_features=continuous_features, categorical_features=categorical_features, categorical_values=categorical_values,
max_features=ape.max_features, min_features=ape.min_features)
growing_sphere.fit(n_in_layer=2000, first_radius=0.1, dicrease_radius=10, sparse=True, verbose=True, feature_variance=ape.feature_variance)
final_largest = distances(growing_sphere.onevsrest[-1], raw_data, ape)
final_smallest = distances(growing_sphere.onevsrest[0], raw_data, ape)
for instance_test in growing_sphere.onevsrest:
distance_counterfactual = distances(instance_test, raw_data, ape)
if distance_counterfactual > final_largest:
print("yeah we expand distance from", final_largest, "to", distance_counterfactual, "distance")
final_largest = distance_counterfactual
elif distance_counterfactual < final_smallest:
print("Ooh we found smallest counterfactual", final_smallest, "from", distance_counterfactual)
final_smallest = distance_counterfactual
#largest = get_distances(growing_sphere.onevsrest[-1], obs)["euclidean"]
#smallest = get_distances(growing_sphere.onevsrest[0], obs)["euclidean"]
if opponent_class == None:
        print("closest class: ", clf.predict(growing_sphere.onevsrest[0].reshape(1, -1)))
print("largest distance from class ", opponent_class, " : " , final_largest)
print("smallest distance from class ", opponent_class, " : " , final_smallest)
return growing_sphere.enemy, final_largest, final_smallest, growing_sphere.onevsrest, growing_sphere.radius
def preparing_dataset(x, y, plot, dataset_name, model):
if plot:
x = PCA(n_components=2).fit_transform(x)
dataset = utils.load_dataset(dataset_name, balance=True, discretize=False, dataset_folder="./dataset", X=x, y=y, plot=plot)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.5)
return dataset, model, x_train, x_test, y_train, y_test
def plot_explanation_results(X, cnt, x_test, y_test, multiclass, model, obs, x_in_sphere,
y_in_sphere, anchor_exp, plt, x_tree, y_tree,
linear_model, plot=True, instances_x_test=[]):
x_min = min([x[0] for x in X])
x_max = max([x[0] for x in X])
y_min = min([y[1] for y in X])
y_max = max([y[1] for y in X])
"""
    # Information about the linear model learned by Lime
    x_plot = np.linspace([x_min-5, y_min -5], [x_max+5, y_max +5], 10)
    y_plot = lime_exp.easy_model.predict(x_plot)
    print("intercept ", lime_exp.intercept)
    print("coef", lime_exp.as_list(label=target_class))
    coef_ = lime_exp.as_list(label=target_class)[0][1] * x_plot + lime_exp.intercept[target_class]
"""
#if cnt < 1 :
(None, plot_subfigure(model, x_test, y_test, x_tree=x_tree, y_tree=y_tree, multiclass=multiclass, clf=model, x_min=x_min-8, y_min=y_min-8, x_max=x_max+8,
y_max=y_max+8, target=obs, x_sphere=x_in_sphere, y_sphere=y_in_sphere, linear_model=linear_model))
"""
else:
auc_score = draw_linear_explanation(plt, x_in_sphere, y_in_sphere, model, x_min, y_min, x_max, y_max, obs,
model.predict(obs.reshape(1, -1)), len(set(y_test)))
"""
#plt.scatter(instances_x_test[:,0], instances_x_test[:,1], marker='*', s=260, color='green', label='instances radius 1')
plt.scatter(obs[0],obs[1], marker='*', s=260, color='yellow', label='target instance')
x_min_anchors, y_min_anchors, width, height = pick_anchors_informations(anchor_exp.names(), x_min=x_min-4,
y_min=y_min-4, width=x_max-x_min+8, height=y_max-y_min+8)
draw_rectangle(plt, x_min_anchors, y_min_anchors, width, height, cnt)
def return_instance_in_cf_growing_sphere(x_instances_to_test_if_in_sphere, obs, closest_counterfactual, longest_distance_other, ape):#, growing_sphere_zero_enemies):
# Compute the shortest distance and return instance in the area of an hyper sphere of radius equals to the minimal distance
# of a counterfactual instance to the target instance
#longest_distance_other = pairwise_distances(obs.reshape(1, -1), growing_sphere_other_enemies[-1].reshape(1, -1))[0][0]
#longest_distance_zero = pairwise_distances(obs.reshape(1, -1), growing_sphere_zero_enemies[-1].reshape(1, -1))[0][0]
#counterfactual_explanation = min(longest_distance_other, longest_distance_zero)
counterfactual_explanation = longest_distance_other
    print("the farthest enemy is at a distance of: ", counterfactual_explanation)
    print("The target is: ", obs, " the counterfactual instance is: ", closest_counterfactual)
position_instances_in_sphere = []
nb_instance_in_sphere = 0
for position, i in enumerate(x_instances_to_test_if_in_sphere):
#if pairwise_distances(closest_counterfactual.reshape(1, -1), i.reshape(1, -1))[0] < counterfactual_explanation:
if distances(i, obs, ape) < counterfactual_explanation:
position_instances_in_sphere.append(position)
nb_instance_in_sphere += 1
    print("number of instances inside the sphere ", nb_instance_in_sphere)
nb_instance_in_sphere = 100 if nb_instance_in_sphere == 0 else nb_instance_in_sphere
return position_instances_in_sphere, nb_instance_in_sphere
def minimum_instance_in_sphere(nb_min_instance_in_sphere, nb_instance_in_sphere, closest_counterfactual, radius_enemies, percentage_instance_sphere,
x_train, position_instances_in_sphere, target_class, model, ape):
nb_different_outcome = 0
while nb_different_outcome < nb_min_instance_in_sphere:
nb_instance_in_sphere = 2*nb_instance_in_sphere
generated_instances_inside_sphere = generate_inside_field(closest_counterfactual, (0, radius_enemies),
(int) (nb_instance_in_sphere*2*percentage_instance_sphere),
ape.max_features, ape.min_features, ape.feature_variance)
x_in_sphere = np.append(x_train[position_instances_in_sphere], generated_instances_inside_sphere, axis=0) if position_instances_in_sphere != [] else generated_instances_inside_sphere
y_in_sphere = model.predict(x_in_sphere)#y[position_instances_in_sphere]
        #print('There are ', nb_instance_in_sphere, " instances inside the sphere out of ", len(x_train), " instances in total.")
for y_sphere in y_in_sphere:
if y_sphere != target_class:
nb_different_outcome += 1
        print('There are ', nb_different_outcome, " instances with a different class inside the sphere out of ", len(x_in_sphere))
return x_in_sphere, y_in_sphere
if __name__ == "__main__":
plot = True
verbose = True
nb_instance = 10
nb_min_instance_in_sphere = 20
threshold_interpretability = 0.95 # Threshold for minimum accuracy of Lime and Anchors
    percentage_instance_sphere = 0.1 # lower values generate fewer artificial instances inside the sphere.
dataset_name = "generate_moons" # generate_blobs, generate_moons, iris, adult, titanic, blood
models = [OneVsRestClassifier(svm.SVC(kernel='linear', probability=True)),
tree.DecisionTreeClassifier(), svm.SVC(kernel='linear', probability=True), MLPClassifier(random_state=1, max_iter=300),
LogisticRegression(),
GradientBoostingClassifier(n_estimators=50, learning_rate=1.0, max_depth=1)]
models = [tree.DecisionTreeClassifier(max_depth=3), LogisticRegression(), RandomForestClassifier(), GradientBoostingClassifier(n_estimators=50)]
for nb_model, model in enumerate(models):
model_name = type(model).__name__
print("LE MODELE UTILISE EST : ", model_name)
X, y, class_names, regression, multiclass, continuous_features, categorical_features, categorical_values, categorical_names = generate_dataset(dataset_name)
dataset, model, x_train, x_test, y_train, y_test = preparing_dataset(X, y, plot, dataset_name, model)
"""
anchor_explainer, lime_explainer, c, model, x_test_class, x_raw_text, predict_fn, predict_fn_lime = compute_model_and_interpretability_models(regression,
class_names, model, dataset, x_train, y_train,
x_test, y_test, target_class, dataset_name)
"""
model = model.fit(x_train, y_train)
print(' ### Accuracy:', model.score(x_test, y_test)) if regression else print(' ### Accuracy:', sum(model.predict(x_test) == y_test)/len(y_test))
# Initialize for plot
"""cf_list_zero = []
cf_list_two = []"""
counterfactual_instances = []
for cnt, obs in enumerate(x_test):
explainer = ape_tabular.ApeTabularExplainer(x_train, class_names, model.predict, #black_box.predict_proba,
continuous_features=continuous_features,
categorical_features=categorical_features, categorical_values=categorical_values,
feature_names=dataset.feature_names, categorical_names=categorical_names,
verbose=True, threshold_precision=threshold_interpretability)
neigh = NearestNeighbors(n_neighbors=2, algorithm='ball_tree', metric=distances, metric_params={"ape": explainer})
#neigh.fit([x_test, x_train, explainer.max_features, explainer.min_features], y_test)
neigh.fit(x_test, y_test)
print("TEST", neigh.kneighbors(x_test))
precision, coverage, f2, multimodal_result = explainer.explain_instance(obs,
all_explanations_model=True)
target_class = model.predict(obs.reshape(1, -1))[0]
explainer.target_class = target_class
if cnt == nb_instance:
break
print('====================================================', cnt)
raw_data = convert_raw_data(obs, dataset_name)
print("observation ", raw_data)
print("Try to find closest boundary ")
growing_sphere = cf.CounterfactualExplanation(raw_data, model.predict, method='GF', target_class=None,
continuous_features=continuous_features, categorical_features=categorical_features,
categorical_values=categorical_values, max_features=explainer.max_features,
min_features=explainer.min_features)
growing_sphere.fit(n_in_layer=2000, first_radius=0.1, dicrease_radius=10, sparse=True, verbose=True,
feature_variance=explainer.feature_variance)
farthest_distance = get_farthest_distance(x_train, explainer.closest_counterfactual, explainer)
position_instances_in_sphere, nb_training_instance_in_sphere = explainer.instances_from_dataset_inside_sphere(explainer.closest_counterfactual,
explainer.extended_radius, x_train)
print("Generate instances in the area of a sphere around the counter factual instance")
instances_in_sphere, _, _, _ = explainer.generate_instances_inside_sphere(explainer.extended_radius,
explainer.closest_counterfactual, x_test, farthest_distance,
explainer.nb_min_instance_per_class_in_sphere,
[], 0)
counterfactual_instances_in_sphere = np.array(explainer.store_counterfactual_instances_in_sphere(instances_in_sphere, explainer.target_class))
print("done generating")
print()
print()
x_instances_to_test_if_in_sphere = x_train
position_instances_in_sphere, nb_instance_in_sphere = return_instance_in_cf_growing_sphere(x_instances_to_test_if_in_sphere, obs,
growing_sphere.enemy, farthest_distance, explainer)
if explainer.target_class != target_class:
print()
print()
                print("there is a problem with the target classes")
                print()
                print()
x_in_sphere, y_in_sphere = minimum_instance_in_sphere(nb_min_instance_in_sphere, nb_instance_in_sphere,
growing_sphere.enemy, growing_sphere.radius,
percentage_instance_sphere, x_train, position_instances_in_sphere,
explainer.target_class, model, explainer)
            # The tricky part is the number of counterfactual instances
print("IL Y A ", len(counterfactual_instances_in_sphere), " ENEMIES over", len(instances_in_sphere))
results = pf.FTU(counterfactual_instances_in_sphere, routine="python")
print(results)
multimodal_results = results.folding_statistics<1
#if multimodal_results:
visualizer = KElbowVisualizer(KMeans(), k=(1,8))
x_elbow = np.array(counterfactual_instances_in_sphere)
visualizer.fit(x_elbow)
n_clusters = visualizer.elbow_value_
if n_clusters is not None:
if verbose: print("n CLUSTERS ", n_clusters)
kmeans = KMeans(n_clusters=n_clusters)
kmeans.fit(counterfactual_instances_in_sphere)
clusters_centers = kmeans.cluster_centers_
if verbose: print("Mean center of clusters from KMEANS ", clusters_centers)
plt.show(block=False)
plt.show(block=False)
plt.pause(1)
plt.close('all')
print("searching for explanations...")
anchor_exp = explainer.anchor_explainer.explain_instance(obs, model.predict, threshold=threshold_interpretability,
delta=0.1, tau=0.15, batch_size=100, max_anchor_size=None,
stop_on_first=False, desired_label=None, beam_size=4)
print("anchors explanation find, now let's go for Lime !")
lime_exp = explainer.lime_explainer.explain_instance_training_dataset(explainer.closest_counterfactual, model.predict_proba,
num_features=4, model_regressor=LogisticRegression(),
instances_in_sphere=instances_in_sphere)
linear_model = lime_exp.easy_model
            print("Anchors precision: ", anchor_exp.precision())
print('Lime explanation')
print('\n'.join(map(str, lime_exp.as_list())))
print('Anchor: %s' % (' AND '.join(anchor_exp.names())))
print("predict observation; ",model.predict(obs.reshape(1, -1)))
for i, j, k, l in zip (precision, f2, coverage, ['LS extend', 'APE', 'anchor']):
print()
if i == 0:
print("YEAH")
elif i == 0.0:
print("okay")
"""
if i == 0 and ("Tree" in model_name or "random" in model_name):
print()
tree.plot_tree(model)
"""
print("precision of", l, i)
print("f2 of", l, j)
print("coverage of", l, k)
# Changing type of data return by growing sphere to be numpy for ploting
#cf_list_zero = np.array(growing_sphere_zero_enemies)
cf_list_two = np.array([])#growing_sphere_two_enemies) if multiclass else None
cf_list_other = np.array(growing_sphere.onevsrest)
filename = os.getcwd() + "/results/"+ dataset_name+ "/" +type(model).__name__+ "/"
print("filename", filename)
#if precision[1] < precision[0] or precision[1] < precision[2]:
y_in_sphere = model.predict(instances_in_sphere)
x_in_sphere = instances_in_sphere[np.where([y == explainer.target_class for y in y_in_sphere])]
x_sphere, y_sphere = [], []
for element in x_in_sphere:
x_sphere.append(element[0])
y_sphere.append(element[1])
"""instances_in_sphere_test, _, _, _ = explainer.generate_instances_inside_sphere(1,
explainer.closest_counterfactual, x_test, farthest_distance,
explainer.nb_min_instance_per_class_in_sphere,
[], 0)
"""
print_x_test, print_x_train, print_y_test, print_y_train = train_test_split(x_train, y_train, test_size=0.4, random_state=42)
if coverage[2] == 1:
                print("Coverage is 1, so I check whether the test split needs to be changed, to verify all the instances on which coverage is computed")
plot_explanation_results(X, cnt, print_x_test, print_y_test, multiclass,
model, obs, counterfactual_instances_in_sphere,
[explainer.target_class]*len(counterfactual_instances_in_sphere), anchor_exp,
                                         plt, x_sphere, y_sphere, linear_model, plot)#, instances_in_sphere_test)
final_plot(cf_list_two, cf_list_other, kmeans.cluster_centers_, counterfactual_instances_in_sphere,
instances_in_sphere, multiclass, filename)
#elif precision[1] < precision [0] or precision[1] < precision[2]:
            #    print("this is getting annoying")
else:
plot_explanation_results(X, cnt, print_x_train, print_y_train, multiclass,
model, obs, counterfactual_instances_in_sphere,
[explainer.target_class]*len(counterfactual_instances_in_sphere), anchor_exp,
plt, x_sphere, y_sphere, linear_model, plot)#, instances_in_sphere_test)
final_plot(cf_list_two, cf_list_other, kmeans.cluster_centers_, counterfactual_instances_in_sphere,
instances_in_sphere, multiclass, filename)
            # TODO check that we are not overfitting by looking at whether this is different
plot_explanation_results(X, cnt, print_x_test, print_y_test, multiclass,
model, obs, counterfactual_instances_in_sphere,
[explainer.target_class]*len(counterfactual_instances_in_sphere), anchor_exp,
plt, x_sphere, y_sphere, linear_model,plot)#, instances_in_sphere_test)
final_plot(cf_list_two, cf_list_other, kmeans.cluster_centers_, counterfactual_instances_in_sphere,
instances_in_sphere, multiclass, filename)
|
[
"pyfolding.FTU",
"sklearn.model_selection.train_test_split",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.neural_network.MLPClassifier",
"sklearn.svm.SVC",
"matplotlib.pyplot.close",
"sklearn.cluster.KMeans",
"growingspheres.utils.gs_utils.distances",
"numpy.append",
"sklearn.neighbors.NearestNeighbors",
"matplotlib.pyplot.pause",
"sklearn.ensemble.RandomForestClassifier",
"matplotlib.pyplot.show",
"plot_functions.plot_subfigure",
"sklearn.linear_model.LogisticRegression",
"plot_functions.final_plot",
"generate_dataset.generate_dataset",
"os.getcwd",
"matplotlib.pyplot.scatter",
"growingspheres.counterfactuals.CounterfactualExplanation",
"ape_tabular.ApeTabularExplainer",
"sklearn.ensemble.GradientBoostingClassifier",
"numpy.where",
"numpy.array",
"sklearn.decomposition.PCA",
"plot_functions.draw_rectangle",
"anchors.utils.load_dataset"
] |
[((3667, 3963), 'growingspheres.counterfactuals.CounterfactualExplanation', 'cf.CounterfactualExplanation', (['raw_data', 'clf.predict'], {'method': '"""GF"""', 'target_class': 'opponent_class', 'continuous_features': 'continuous_features', 'categorical_features': 'categorical_features', 'categorical_values': 'categorical_values', 'max_features': 'ape.max_features', 'min_features': 'ape.min_features'}), "(raw_data, clf.predict, method='GF',\n target_class=opponent_class, continuous_features=continuous_features,\n categorical_features=categorical_features, categorical_values=\n categorical_values, max_features=ape.max_features, min_features=ape.\n min_features)\n", (3695, 3963), True, 'from growingspheres import counterfactuals as cf\n'), ((4144, 4198), 'growingspheres.utils.gs_utils.distances', 'distances', (['growing_sphere.onevsrest[-1]', 'raw_data', 'ape'], {}), '(growing_sphere.onevsrest[-1], raw_data, ape)\n', (4153, 4198), False, 'from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances\n'), ((4220, 4273), 'growingspheres.utils.gs_utils.distances', 'distances', (['growing_sphere.onevsrest[0]', 'raw_data', 'ape'], {}), '(growing_sphere.onevsrest[0], raw_data, ape)\n', (4229, 4273), False, 'from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances\n'), ((5520, 5637), 'anchors.utils.load_dataset', 'utils.load_dataset', (['dataset_name'], {'balance': '(True)', 'discretize': '(False)', 'dataset_folder': '"""./dataset"""', 'X': 'x', 'y': 'y', 'plot': 'plot'}), "(dataset_name, balance=True, discretize=False,\n dataset_folder='./dataset', X=x, y=y, plot=plot)\n", (5538, 5637), False, 'from anchors import utils, anchor_tabular, anchor_base\n'), ((5674, 5711), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.5)'}), '(x, y, test_size=0.5)\n', (5690, 5711), False, 'from sklearn.model_selection import train_test_split\n'), ((7343, 7435), 'matplotlib.pyplot.scatter', 'plt.scatter', (['obs[0]', 'obs[1]'], {'marker': '"""*"""', 's': '(260)', 'color': '"""yellow"""', 'label': '"""target instance"""'}), "(obs[0], obs[1], marker='*', s=260, color='yellow', label=\n 'target instance')\n", (7354, 7435), True, 'import matplotlib.pyplot as plt\n'), ((7664, 7733), 'plot_functions.draw_rectangle', 'draw_rectangle', (['plt', 'x_min_anchors', 'y_min_anchors', 'width', 'height', 'cnt'], {}), '(plt, x_min_anchors, y_min_anchors, width, height, cnt)\n', (7678, 7733), False, 'from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation\n'), ((2031, 2081), 'growingspheres.utils.gs_utils.distances', 'distances', (['training_instance', 'target_instance', 'ape'], {}), '(training_instance, target_instance, ape)\n', (2040, 2081), False, 'from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances\n'), ((4359, 4398), 'growingspheres.utils.gs_utils.distances', 'distances', (['instance_test', 'raw_data', 'ape'], {}), '(instance_test, raw_data, ape)\n', (4368, 4398), False, 'from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances\n'), ((6644, 6908), 'plot_functions.plot_subfigure', 'plot_subfigure', (['model', 'x_test', 'y_test'], {'x_tree': 'x_tree', 'y_tree': 'y_tree', 'multiclass': 'multiclass', 'clf': 'model', 'x_min': '(x_min - 8)', 'y_min': '(y_min - 8)', 'x_max': '(x_max + 8)', 'y_max': '(y_max + 8)', 'target': 'obs', 'x_sphere': 'x_in_sphere', 'y_sphere': 
'y_in_sphere', 'linear_model': 'linear_model'}), '(model, x_test, y_test, x_tree=x_tree, y_tree=y_tree,\n multiclass=multiclass, clf=model, x_min=x_min - 8, y_min=y_min - 8,\n x_max=x_max + 8, y_max=y_max + 8, target=obs, x_sphere=x_in_sphere,\n y_sphere=y_in_sphere, linear_model=linear_model)\n', (6658, 6908), False, 'from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation\n'), ((11218, 11247), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {}), '()\n', (11245, 11247), False, 'from sklearn import tree, svm\n'), ((11249, 11291), 'sklearn.svm.SVC', 'svm.SVC', ([], {'kernel': '"""linear"""', 'probability': '(True)'}), "(kernel='linear', probability=True)\n", (11256, 11291), False, 'from sklearn import tree, svm\n'), ((11293, 11336), 'sklearn.neural_network.MLPClassifier', 'MLPClassifier', ([], {'random_state': '(1)', 'max_iter': '(300)'}), '(random_state=1, max_iter=300)\n', (11306, 11336), False, 'from sklearn.neural_network import MLPClassifier\n'), ((11356, 11376), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (11374, 11376), False, 'from sklearn.linear_model import LogisticRegression\n'), ((11394, 11469), 'sklearn.ensemble.GradientBoostingClassifier', 'GradientBoostingClassifier', ([], {'n_estimators': '(50)', 'learning_rate': '(1.0)', 'max_depth': '(1)'}), '(n_estimators=50, learning_rate=1.0, max_depth=1)\n', (11420, 11469), False, 'from sklearn.ensemble import GradientBoostingClassifier\n'), ((11489, 11529), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {'max_depth': '(3)'}), '(max_depth=3)\n', (11516, 11529), False, 'from sklearn import tree, svm\n'), ((11531, 11551), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (11549, 11551), False, 'from sklearn.linear_model import LogisticRegression\n'), ((11553, 11577), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '()\n', (11575, 11577), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((11579, 11622), 'sklearn.ensemble.GradientBoostingClassifier', 'GradientBoostingClassifier', ([], {'n_estimators': '(50)'}), '(n_estimators=50)\n', (11605, 11622), False, 'from sklearn.ensemble import GradientBoostingClassifier\n'), ((11900, 11930), 'generate_dataset.generate_dataset', 'generate_dataset', (['dataset_name'], {}), '(dataset_name)\n', (11916, 11930), False, 'from generate_dataset import generate_dataset\n'), ((8927, 8949), 'growingspheres.utils.gs_utils.distances', 'distances', (['i', 'obs', 'ape'], {}), '(i, obs, ape)\n', (8936, 8949), False, 'from growingspheres.utils.gs_utils import get_distances, generate_inside_field, distances\n'), ((10041, 10136), 'numpy.append', 'np.append', (['x_train[position_instances_in_sphere]', 'generated_instances_inside_sphere'], {'axis': '(0)'}), '(x_train[position_instances_in_sphere],\n generated_instances_inside_sphere, axis=0)\n', (10050, 10136), True, 'import numpy as np\n'), ((11157, 11199), 'sklearn.svm.SVC', 'svm.SVC', ([], {'kernel': '"""linear"""', 'probability': '(True)'}), "(kernel='linear', probability=True)\n", (11164, 11199), False, 'from sklearn import tree, svm\n'), ((12880, 13230), 'ape_tabular.ApeTabularExplainer', 'ape_tabular.ApeTabularExplainer', (['x_train', 'class_names', 'model.predict'], {'continuous_features': 'continuous_features', 'categorical_features': 'categorical_features', 'categorical_values': 'categorical_values', 
'feature_names': 'dataset.feature_names', 'categorical_names': 'categorical_names', 'verbose': '(True)', 'threshold_precision': 'threshold_interpretability'}), '(x_train, class_names, model.predict,\n continuous_features=continuous_features, categorical_features=\n categorical_features, categorical_values=categorical_values,\n feature_names=dataset.feature_names, categorical_names=\n categorical_names, verbose=True, threshold_precision=\n threshold_interpretability)\n', (12911, 13230), False, 'import ape_tabular\n'), ((13495, 13605), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': '(2)', 'algorithm': '"""ball_tree"""', 'metric': 'distances', 'metric_params': "{'ape': explainer}"}), "(n_neighbors=2, algorithm='ball_tree', metric=distances,\n metric_params={'ape': explainer})\n", (13511, 13605), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((14418, 14718), 'growingspheres.counterfactuals.CounterfactualExplanation', 'cf.CounterfactualExplanation', (['raw_data', 'model.predict'], {'method': '"""GF"""', 'target_class': 'None', 'continuous_features': 'continuous_features', 'categorical_features': 'categorical_features', 'categorical_values': 'categorical_values', 'max_features': 'explainer.max_features', 'min_features': 'explainer.min_features'}), "(raw_data, model.predict, method='GF',\n target_class=None, continuous_features=continuous_features,\n categorical_features=categorical_features, categorical_values=\n categorical_values, max_features=explainer.max_features, min_features=\n explainer.min_features)\n", (14446, 14718), True, 'from growingspheres import counterfactuals as cf\n'), ((17306, 17366), 'pyfolding.FTU', 'pf.FTU', (['counterfactual_instances_in_sphere'], {'routine': '"""python"""'}), "(counterfactual_instances_in_sphere, routine='python')\n", (17312, 17366), True, 'import pyfolding as pf\n'), ((17588, 17632), 'numpy.array', 'np.array', (['counterfactual_instances_in_sphere'], {}), '(counterfactual_instances_in_sphere)\n', (17596, 17632), True, 'import numpy as np\n'), ((18099, 18120), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (18107, 18120), True, 'import matplotlib.pyplot as plt\n'), ((18133, 18154), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (18141, 18154), True, 'import matplotlib.pyplot as plt\n'), ((18167, 18179), 'matplotlib.pyplot.pause', 'plt.pause', (['(1)'], {}), '(1)\n', (18176, 18179), True, 'import matplotlib.pyplot as plt\n'), ((18192, 18208), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (18201, 18208), True, 'import matplotlib.pyplot as plt\n'), ((20062, 20074), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (20070, 20074), True, 'import numpy as np\n'), ((20155, 20189), 'numpy.array', 'np.array', (['growing_sphere.onevsrest'], {}), '(growing_sphere.onevsrest)\n', (20163, 20189), True, 'import numpy as np\n'), ((21209, 21275), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_train', 'y_train'], {'test_size': '(0.4)', 'random_state': '(42)'}), '(x_train, y_train, test_size=0.4, random_state=42)\n', (21225, 21275), False, 'from sklearn.model_selection import train_test_split\n'), ((5469, 5488), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(2)'}), '(n_components=2)\n', (5472, 5488), False, 'from sklearn.decomposition import PCA\n'), ((17547, 17555), 'sklearn.cluster.KMeans', 'KMeans', ([], {}), '()\n', (17553, 17555), False, 'from sklearn.cluster import KMeans\n'), 
((17843, 17872), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': 'n_clusters'}), '(n_clusters=n_clusters)\n', (17849, 17872), False, 'from sklearn.cluster import KMeans\n'), ((20508, 20570), 'numpy.where', 'np.where', (['[(y == explainer.target_class) for y in y_in_sphere]'], {}), '([(y == explainer.target_class) for y in y_in_sphere])\n', (20516, 20570), True, 'import numpy as np\n'), ((21941, 22091), 'plot_functions.final_plot', 'final_plot', (['cf_list_two', 'cf_list_other', 'kmeans.cluster_centers_', 'counterfactual_instances_in_sphere', 'instances_in_sphere', 'multiclass', 'filename'], {}), '(cf_list_two, cf_list_other, kmeans.cluster_centers_,\n counterfactual_instances_in_sphere, instances_in_sphere, multiclass,\n filename)\n', (21951, 22091), False, 'from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation\n'), ((22710, 22860), 'plot_functions.final_plot', 'final_plot', (['cf_list_two', 'cf_list_other', 'kmeans.cluster_centers_', 'counterfactual_instances_in_sphere', 'instances_in_sphere', 'multiclass', 'filename'], {}), '(cf_list_two, cf_list_other, kmeans.cluster_centers_,\n counterfactual_instances_in_sphere, instances_in_sphere, multiclass,\n filename)\n', (22720, 22860), False, 'from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation\n'), ((23455, 23605), 'plot_functions.final_plot', 'final_plot', (['cf_list_two', 'cf_list_other', 'kmeans.cluster_centers_', 'counterfactual_instances_in_sphere', 'instances_in_sphere', 'multiclass', 'filename'], {}), '(cf_list_two, cf_list_other, kmeans.cluster_centers_,\n counterfactual_instances_in_sphere, instances_in_sphere, multiclass,\n filename)\n', (23465, 23605), False, 'from plot_functions import plot_hyperplane, plot_subfigure, final_plot, pick_anchors_informations, draw_rectangle, draw_linear_explanation\n'), ((18860, 18880), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (18878, 18880), False, 'from sklearn.linear_model import LogisticRegression\n'), ((20213, 20224), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (20222, 20224), False, 'import os\n')]
|
import numpy as np
import cv2
# convert a hexadecimal color to a BGR color array
def hex2bgr(h):
b = h & 0xFF
g = (h >> 8) & 0xFF
r = (h >> 16) & 0xFF
return np.array([b, g, r])
# predefined colors in BGR
MAROON = hex2bgr(0x800000)
RED = hex2bgr(0xFF0000)
ORANGE = hex2bgr(0xFFA500)
YELLOW = hex2bgr(0xFFFF00)
OLIVE = hex2bgr(0x808000)
GREEN = hex2bgr(0x008000)
PURPLE = hex2bgr(0x800080)
FUCHSIA = hex2bgr(0xFF00FF)
LIME = hex2bgr(0x00FF00)
TEAL = hex2bgr(0x008080)
AQUA = hex2bgr(0x00FFFF)
BLUE = hex2bgr(0x0000FF)
NAVY = hex2bgr(0x000080)
BLACK = hex2bgr(0x000000)
GRAY = hex2bgr(0x808080)
SILVER = hex2bgr(0xC0C0C0)
WHITE = hex2bgr(0xFFFFFF)
# convert a color into another colorspace
#
# arguments:
# * c: the color to convert. Can be a list, numpy array, or tuple.
# * code: the opencv color conversion code, e.g. cv2.COLOR_BGR2HSV
def cvtPixel(c, code):
img = np.array(c)
if img.dtype == np.float64:
img = img.astype(np.float32, copy=False)
elif img.dtype != np.float32:
img = img.astype(np.uint8, copy=False)
img = img.reshape((1, 1, -1))
converted = cv2.cvtColor(img, code)
return converted[0][0]
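# --- Added usage illustration (not part of the original module) ---------------
# Assuming OpenCV's usual BGR->HSV convention (hue in [0, 179]), pure blue maps
# to hue 120 with full saturation and value.
_hsv_blue = cvtPixel(BLUE, cv2.COLOR_BGR2HSV)
assert (_hsv_blue == np.array([120, 255, 255])).all()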
|
[
"cv2.cvtColor",
"numpy.array"
] |
[((175, 194), 'numpy.array', 'np.array', (['[b, g, r]'], {}), '([b, g, r])\n', (183, 194), True, 'import numpy as np\n'), ((928, 939), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (936, 939), True, 'import numpy as np\n'), ((1152, 1175), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'code'], {}), '(img, code)\n', (1164, 1175), False, 'import cv2\n')]
|
import itertools
from tempfile import NamedTemporaryFile
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from bokeh.plotting import figure, output_file
def get_validation_plot(true_value, prediction):
output_file(NamedTemporaryFile().name)
x_min = min(min(true_value), min(prediction))
x_max = max(max(true_value), max(prediction))
x_range = [x_min, x_max]
y_range = x_range
plot = figure(width=800, height=800,
x_range=x_range, y_range=y_range)
plot.xaxis.axis_label = "True value"
plot.xaxis.axis_label_text_font_size = '14pt'
plot.xaxis.major_label_text_font_size = '12pt'
plot.yaxis.axis_label = "Prediction"
plot.yaxis.axis_label_text_font_size = '14pt'
plot.yaxis.major_label_text_font_size = '12pt'
plot.circle(true_value, prediction)
plot.line(x_range, y_range, line_dash='dashed', color='gray')
return plot
def get_confusion_matrix_plot(confusion_matrix,
target_names,
title='Confusion matrix',
cmap=None,
normalize=True):
"""
given a sklearn confusion matrix (cm), make a nice plot
Arguments
---------
confusion_matrix: confusion matrix from sklearn.metrics.confusion_matrix
target_names: given classification classes such as [0, 1, 2]
the class names, for example: ['high', 'medium', 'low']
title: the text to display at the top of the matrix
cmap: the gradient of the values displayed from matplotlib.pyplot.cm
see http://matplotlib.org/examples/color/colormaps_reference.html
plt.get_cmap('jet') or plt.cm.Blues
normalize: If False, plot the raw numbers
If True, plot the proportions
Usage
-----
plot_confusion_matrix(cm = cm, # confusion matrix created by
# sklearn.metrics.confusion_matrix
normalize = True, # show proportions
target_names = y_labels_vals, # list of names of the classes
title = best_estimator_name) # title of graph
    Citation
---------
http://scikit-learn.org/stable/auto_examples/model_selection/plot_confusion_matrix.html
"""
if normalize:
confusion_matrix = confusion_matrix.astype('float') / confusion_matrix.sum()
accuracy = np.trace(confusion_matrix) / np.sum(confusion_matrix).astype('float')
misclass = 1 - accuracy
if cmap is None:
cmap = plt.get_cmap('Blues')
plt.figure(figsize=(8, 8))
plt.imshow(confusion_matrix, interpolation='nearest', cmap=cmap, vmin=0, vmax=1)
plt.title(title)
plt.colorbar()
if target_names is not None:
tick_marks = np.arange(len(target_names))
plt.xticks(tick_marks, target_names, rotation=45)
plt.yticks(tick_marks, target_names)
matplotlib.rcParams.update({'font.size': 20})
thresh = confusion_matrix.max() / 1.5 if normalize else confusion_matrix.max() / 2
for i, j in itertools.product(range(confusion_matrix.shape[0]), range(confusion_matrix.shape[1])):
if normalize:
plt.text(j, i, "{:0.4f}".format(confusion_matrix[i, j]),
horizontalalignment="center",
color="white" if confusion_matrix[i, j] > thresh else "black")
else:
plt.text(j, i, "{:,}".format(confusion_matrix[i, j]),
horizontalalignment="center",
color="white" if confusion_matrix[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label', fontsize=16)
plt.xlabel('Predicted label\naccuracy={:0.4f}; misclass={:0.4f}'.format(accuracy, misclass), fontsize=16)
return plt
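# --- Added usage sketch (not part of the original module) ---------------------
# Minimal demonstration of get_validation_plot with made-up numbers; the bokeh
# output file is a temporary file created inside the function itself.
if __name__ == "__main__":
    demo_plot = get_validation_plot(true_value=[1.0, 2.0, 3.0, 4.0],
                                    prediction=[1.1, 1.8, 3.2, 3.9])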
|
[
"matplotlib.pyplot.title",
"tempfile.NamedTemporaryFile",
"numpy.trace",
"bokeh.plotting.figure",
"numpy.sum",
"matplotlib.pyplot.get_cmap",
"matplotlib.pyplot.imshow",
"matplotlib.rcParams.update",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.tight_layout"
] |
[((432, 495), 'bokeh.plotting.figure', 'figure', ([], {'width': '(800)', 'height': '(800)', 'x_range': 'x_range', 'y_range': 'y_range'}), '(width=800, height=800, x_range=x_range, y_range=y_range)\n', (438, 495), False, 'from bokeh.plotting import figure, output_file\n'), ((2741, 2767), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (2751, 2767), True, 'import matplotlib.pyplot as plt\n'), ((2772, 2857), 'matplotlib.pyplot.imshow', 'plt.imshow', (['confusion_matrix'], {'interpolation': '"""nearest"""', 'cmap': 'cmap', 'vmin': '(0)', 'vmax': '(1)'}), "(confusion_matrix, interpolation='nearest', cmap=cmap, vmin=0, vmax=1\n )\n", (2782, 2857), True, 'import matplotlib.pyplot as plt\n'), ((2857, 2873), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (2866, 2873), True, 'import matplotlib.pyplot as plt\n'), ((2878, 2892), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (2890, 2892), True, 'import matplotlib.pyplot as plt\n'), ((3085, 3130), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 20}"], {}), "({'font.size': 20})\n", (3111, 3130), False, 'import matplotlib\n'), ((3767, 3785), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3783, 3785), True, 'import matplotlib.pyplot as plt\n'), ((3790, 3827), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True label"""'], {'fontsize': '(16)'}), "('True label', fontsize=16)\n", (3800, 3827), True, 'import matplotlib.pyplot as plt\n'), ((2579, 2605), 'numpy.trace', 'np.trace', (['confusion_matrix'], {}), '(confusion_matrix)\n', (2587, 2605), True, 'import numpy as np\n'), ((2714, 2735), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""Blues"""'], {}), "('Blues')\n", (2726, 2735), True, 'import matplotlib.pyplot as plt\n'), ((2985, 3034), 'matplotlib.pyplot.xticks', 'plt.xticks', (['tick_marks', 'target_names'], {'rotation': '(45)'}), '(tick_marks, target_names, rotation=45)\n', (2995, 3034), True, 'import matplotlib.pyplot as plt\n'), ((3043, 3079), 'matplotlib.pyplot.yticks', 'plt.yticks', (['tick_marks', 'target_names'], {}), '(tick_marks, target_names)\n', (3053, 3079), True, 'import matplotlib.pyplot as plt\n'), ((241, 261), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {}), '()\n', (259, 261), False, 'from tempfile import NamedTemporaryFile\n'), ((2608, 2632), 'numpy.sum', 'np.sum', (['confusion_matrix'], {}), '(confusion_matrix)\n', (2614, 2632), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
import sys
import numpy as np
from netCDF4 import Dataset
with Dataset(sys.argv[1]) as nc1, Dataset(sys.argv[2]) as nc2:
if nc1.variables.keys()!=nc2.variables.keys():
print("Variables are different")
sys.exit(1)
for varname in nc1.variables.keys():
diff = nc2[varname][:]-nc1[varname][:]
if (np.abs(diff)).max() != 0:
print(varname,"is different")
sys.exit(1)
|
[
"netCDF4.Dataset",
"numpy.abs",
"sys.exit"
] |
[((87, 107), 'netCDF4.Dataset', 'Dataset', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (94, 107), False, 'from netCDF4 import Dataset\n'), ((116, 136), 'netCDF4.Dataset', 'Dataset', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (123, 136), False, 'from netCDF4 import Dataset\n'), ((235, 246), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (243, 246), False, 'import sys\n'), ((406, 417), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (414, 417), False, 'import sys\n'), ((338, 350), 'numpy.abs', 'np.abs', (['diff'], {}), '(diff)\n', (344, 350), True, 'import numpy as np\n')]
|
"""
Functions to prepare the data for pciSeq. The label image and spots are parsed and if a spot
lies within the cell boundaries then the corresponding cell id is recorded.
Cell centroids and cell areas are also calculated.
"""
import os
import shutil
import numpy as np
import pandas as pd
import skimage.measure as skmeas
from typing import Tuple
from scipy.sparse import coo_matrix, csr_matrix, save_npz, load_npz
from pciSeq.src.preprocess.cell_borders import extract_borders_par, extract_borders_dip
import logging
dir_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger()
# def inside_cell(label_image: np.array, idx: np.array) -> np.array:
# """
# :param label_image: An array of size height-by-width for the label image.
# :param idx: An array of size 2-by-N of the pixels coordinates of spot idx[k], k=1...N
# :return:
# a = np.array([ [4,0,1],
# [2,0,0],
# [0,1,0]])
#
# idx = np.array([[0,0],
# [2, 1],
# [1,2],
# [1,3]])
#
# inside_cell(a, idx.T) = [4., 1., 0., nan]
# which means that:
# spot with coords [0,0] lies inside cell 4
# spot with coords [2,0] lies inside cell 1
# spot with coords [1,2] is a background spot
# spot with coords [1,3] is outside the bounds and assigned to nan
#
# """
# assert isinstance(idx[0], np.ndarray), "Array 'idx' must be an array of arrays."
# idx = idx.astype(np.int64)
# out = np.array([])
# dim = np.ones(idx.shape[0], dtype=int)
# dim[:len(label_image.shape)] = label_image.shape
#
# # output array
# out = np.nan * np.ones(idx.shape[1], dtype=int)
#
# # find the ones within bounds:
# is_within = np.all(idx.T <= dim-1, axis=1)
#
# # also keep only non-negative ones
# is_positive = np.all(idx.T >= 0, axis=1)
#
# # filter array
# arr = idx[:, is_within & is_positive]
# flat_idx = np.ravel_multi_index(arr, dims=dim, order='C')
# out[is_within & is_positive] = label_image.ravel()[flat_idx]
#
# # if the matrix a is a coo_matrix then the following should be
# # equivalent (maybe better memory-wise since you do not have use
# # a proper array (no need to do coo.toarray())
# # out[is_within & is_positive] = a.tocsr(arr)
# # print('in label_spot')
#
# return out
def inside_cell(label_image, spots) -> np.array:
if isinstance(label_image, coo_matrix):
label_image = label_image.tocsr()
elif isinstance(label_image, np.ndarray):
label_image = csr_matrix(label_image)
elif isinstance(label_image, csr_matrix):
pass
else:
raise Exception('label_image should be of type "csr_matrix" ')
m = label_image[spots.y, spots.x]
out = np.asarray(m)
return out[0]
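# A minimal sketch (not part of the original module) of how inside_cell maps spots to labels;
# the toy label image and spot coordinates below are illustrative only:
#
#   demo_labels = coo_matrix(np.array([[4, 0, 1],
#                                       [2, 0, 0],
#                                       [0, 1, 0]]))
#   demo_spots = pd.DataFrame({'x': [0, 2, 2], 'y': [0, 0, 2]})
#   inside_cell(demo_labels, demo_spots)   # -> [4, 1, 0]: cells 4 and 1, then background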
def remap_labels(coo):
"""
    Used for debugging/sanity checking only. It reshuffles the label_image
"""
coo_max = coo.data.max()
_keys = 1 + np.arange(coo_max)
_vals = _keys.copy()
np.random.shuffle(_vals)
d = dict(zip(_keys, _vals))
new_data = np.array([d[x] for x in coo.data]).astype(np.uint64)
out = coo_matrix((new_data, (coo.row, coo.col)), shape=coo.shape)
return out
def stage_data(spots: pd.DataFrame, coo: coo_matrix) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
"""
Reads the spots and the label image that are passed in and calculates which cell (if any) encircles any
given spot within its boundaries. It also retrieves the coordinates of the cell boundaries, the cell
centroids and the cell area
"""
logger.info(' Number of spots passed-in: %d' % spots.shape[0])
logger.info(' Number of segmented cells: %d' % len(set(coo.data)))
logger.info(' Segmentation array implies that image has width: %dpx and height: %dpx' % (coo.shape[1], coo.shape[0]))
mask_x = (spots.x >= 0) & (spots.x <= coo.shape[1])
mask_y = (spots.y >= 0) & (spots.y <= coo.shape[0])
spots = spots[mask_x & mask_y]
# Debugging code!
# resuffle
# spots = spots.sample(frac=1).reset_index(drop=True)
# _point = [5471-14, 110]
# logger.info('label at (y, x): (%d, %d) is %d' % (_point[0], _point[1], coo.toarray()[_point[0], _point[1]]))
# coo = remap_labels(coo)
# logger.info('remapped label at (y, x): (%d, %d) is %d' % (_point[0], _point[1], coo.toarray()[_point[0], _point[1]]))
# 1. Find which cell the spots lie within
# yx_coords = spots[['y', 'x']].values.T
inc = inside_cell(coo.tocsr(), spots)
spots = spots.assign(label=inc)
# 2. Get cell centroids and area
props = skmeas.regionprops(coo.toarray().astype(np.int32))
props_df = pd.DataFrame(data=[(d.label, d.area, d.centroid[1], d.centroid[0]) for d in props],
columns=['label', 'area', 'x_cell', 'y_cell'])
# 3. Get the cell boundaries
cell_boundaries = extract_borders_dip(coo.toarray().astype(np.uint32), 0, 0, [0])
assert props_df.shape[0] == cell_boundaries.shape[0] == coo.data.max()
assert set(spots.label[spots.label > 0]) <= set(props_df.label)
cells = props_df.merge(cell_boundaries)
    cells = cells.sort_values(by=['label', 'x_cell', 'y_cell'])
assert cells.shape[0] == cell_boundaries.shape[0] == props_df.shape[0]
# join spots and cells on the cell label so you can get the x,y coords of the cell for any given spot
spots = spots.merge(cells, how='left', on=['label'])
_cells = cells[['label', 'area', 'x_cell', 'y_cell']].rename(columns={'x_cell': 'x', 'y_cell': 'y'})
_cell_boundaries = cells[['label', 'coords']]
_spots = spots[['x', 'y', 'label', 'Gene', 'x_cell', 'y_cell']].rename(columns={'Gene': 'target', 'x': 'x_global', 'y': 'y_global'})
return _cells, _cell_boundaries, _spots
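# A hedged usage sketch (file names and columns are assumptions, not part of this module):
#   spots_df = pd.read_csv('spots.csv')            # needs at least the columns Gene, x, y
#   coo = load_npz('label_image.npz').tocoo()      # segmentation labels as a sparse matrix
#   cells, cell_boundaries, spots_out = stage_data(spots_df, coo)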
|
[
"pandas.DataFrame",
"numpy.asarray",
"os.path.realpath",
"logging.getLogger",
"scipy.sparse.coo_matrix",
"scipy.sparse.csr_matrix",
"numpy.arange",
"numpy.array",
"numpy.random.shuffle"
] |
[((586, 605), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (603, 605), False, 'import logging\n'), ((549, 575), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (565, 575), False, 'import os\n'), ((2835, 2848), 'numpy.asarray', 'np.asarray', (['m'], {}), '(m)\n', (2845, 2848), True, 'import numpy as np\n'), ((3075, 3099), 'numpy.random.shuffle', 'np.random.shuffle', (['_vals'], {}), '(_vals)\n', (3092, 3099), True, 'import numpy as np\n'), ((3210, 3269), 'scipy.sparse.coo_matrix', 'coo_matrix', (['(new_data, (coo.row, coo.col))'], {'shape': 'coo.shape'}), '((new_data, (coo.row, coo.col)), shape=coo.shape)\n', (3220, 3269), False, 'from scipy.sparse import coo_matrix, csr_matrix, save_npz, load_npz\n'), ((4743, 4877), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': '[(d.label, d.area, d.centroid[1], d.centroid[0]) for d in props]', 'columns': "['label', 'area', 'x_cell', 'y_cell']"}), "(data=[(d.label, d.area, d.centroid[1], d.centroid[0]) for d in\n props], columns=['label', 'area', 'x_cell', 'y_cell'])\n", (4755, 4877), True, 'import pandas as pd\n'), ((3027, 3045), 'numpy.arange', 'np.arange', (['coo_max'], {}), '(coo_max)\n', (3036, 3045), True, 'import numpy as np\n'), ((2623, 2646), 'scipy.sparse.csr_matrix', 'csr_matrix', (['label_image'], {}), '(label_image)\n', (2633, 2646), False, 'from scipy.sparse import coo_matrix, csr_matrix, save_npz, load_npz\n'), ((3147, 3181), 'numpy.array', 'np.array', (['[d[x] for x in coo.data]'], {}), '([d[x] for x in coo.data])\n', (3155, 3181), True, 'import numpy as np\n')]
|
import numpy
import PIL.Image
import torch.utils.data
import torchvision
import conf.config
import dataset.bak_dataset_utils
class VOCDataset(torch.utils.data.Dataset):
def __init__(self, config: dict, root: str, image_set: str, train: bool = True) -> None:
super().__init__()
self.config = config
self.voc2012_dataset = torchvision.datasets.VOCDetection(
root=root,
image_set=image_set,
)
self.transforms: dataset.bak_dataset_utils.Compose = dataset.bak_dataset_utils.get_transforms(self.config, train)
def __getitem__(self, idx: int) -> (PIL.Image.Image, numpy.ndarray):
(raw_image, raw_annotation) = self.voc2012_dataset[idx]
raw_target = []
for object in raw_annotation["annotation"]["object"]:
xmin, ymin, xmax, ymax = \
int(object["bndbox"]["xmin"]), \
int(object["bndbox"]["ymin"]), \
int(object["bndbox"]["xmax"]), \
int(object["bndbox"]["ymax"])
raw_x = (xmax + xmin) / 2
raw_y = (ymax + ymin) / 2
raw_w = xmax - xmin
raw_h = ymax - ymin
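            # Each appended target row is (center_x, center_y, width, height, class_index)
            # in raw pixel units; class names missing from config["labels"] map to -1.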
raw_target.append(
[
raw_x, raw_y, raw_w, raw_h,
self.config["labels"].index(object["name"]) if object["name"] in self.config["labels"] else -1
]
)
raw_target = numpy.asarray(raw_target)
        # 5. Apply the data transforms
scaled_image, scaled_target = self.transforms(raw_image, raw_target)
        # Return the image at this index together with its transformed target
return scaled_image, scaled_target
    def __len__(self) -> int:  # Return the length of the dataset
return len(self.voc2012_dataset)
def get_voc_dataloader(
config: dict,
image_set: str,
batch_size: int = 1,
train: bool = False,
shuffle: bool = False,
num_workers: int = 0,
) -> torch.utils.data.DataLoader:
voc_dataset = VOCDataset(
config=config,
root=conf.config.VocDatasetRoot,
image_set=image_set,
train=train
)
voc_dataloader = torch.utils.data.DataLoader(
voc_dataset,
batch_size=batch_size,
shuffle=shuffle,
num_workers=num_workers,
collate_fn=dataset.bak_dataset_utils.collate_fn,
drop_last=True
)
return voc_dataloader
def get_voc_train_dataloader(
config: dict,
batch_size: int = 1,
train: bool = False,
shuffle: bool = False,
num_workers: int = 0,
) -> torch.utils.data.DataLoader:
return get_voc_dataloader(
config,
"train",
batch_size,
train,
shuffle,
num_workers,
)
def get_voc_eval_dataloader(
config: dict,
batch_size: int = 1,
train: bool = False,
shuffle: bool = False,
num_workers: int = 0,
) -> torch.utils.data.DataLoader:
return get_voc_dataloader(
config,
"val",
batch_size,
train,
shuffle,
num_workers,
)
# -----------------------------------------------------------------------------------------------------------#
# Test
# -----------------------------------------------------------------------------------------------------------#
if __name__ == "__main__":
import conf.config
EPOCH = 2
voc_dataloader = get_voc_train_dataloader(
config=conf.config.VocConfig,
)
print(len(voc_dataloader))
print(len(get_voc_eval_dataloader(config=conf.config.VocConfig)))
for epoch in range(EPOCH):
print("Epoch:", epoch)
for step, (tensord_images, tensord_target_list) in enumerate(voc_dataloader):
print("step:", step)
print(tensord_images)
print(tensord_target_list)
exit(-1)
|
[
"numpy.asarray",
"torchvision.datasets.VOCDetection"
] |
[((354, 419), 'torchvision.datasets.VOCDetection', 'torchvision.datasets.VOCDetection', ([], {'root': 'root', 'image_set': 'image_set'}), '(root=root, image_set=image_set)\n', (387, 419), False, 'import torchvision\n'), ((1438, 1463), 'numpy.asarray', 'numpy.asarray', (['raw_target'], {}), '(raw_target)\n', (1451, 1463), False, 'import numpy\n')]
|
# to build the phylogeny:
# - andi *.fasta > ecoli.distances
# - python bin/fix_andi_names.py accession_list < ../results/ecoli.distances > ../results/ecoli.distances.fixed # turns andi prefixes into genome names
# - python bin/draw_phylogeny.py ../results/ecoli_matrix_2.map [full.path.to]/ecoli.distances.fixed ../results/EDL933/calculate_bias.EDL933.* > maketree.R
# - run maketree.R to plot the phylogeny
# - also generated is gradient.pdf which can be used as the colour key
#
# usage
# draw_phylogeny genome_map distance_matrix resultfiles > Rcode
#
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors
import matplotlib.cm
#import matplotlib.colors as mcolors
def parse( src, distances, results ):
info = {}
for line in open(src, 'r'):
fields = line.strip().split(',')
if len(fields) > 2:
accession = fields[0].split('/')[-1][:-6]
info[accession] = { 'n': fields[1], 'p': fields[2] }
#print 'added', accession, 'with', fields[1]
records = []
for target in results:
#print target
fn_fields = target.split('/')[-1].split( '.' )
accession = '.'.join( [ fn_fields[2], fn_fields[3] ] )
for line in open( target, 'r' ):
if line.startswith( 'ESTIMATED' ):
fields = line.strip().split()
low = float( fields[2] )
mid = float( fields[4] )
high = float( fields[6] )
#print 'name', info[accession]['n'], 'given', accession
record = { 'a': accession, 'l': low, 'm': mid, 'h': high, 'p': info[accession]['p'], 'n': info[accession]['n'] }
records.append( record )
return info, records
def make_colour_code( info, distances, records, out_fh ):
max_bias = 20.
for record in records:
max_bias = max( max_bias, record['m'] )
sys.stderr.write( 'max bias {0}\n'.format( max_bias ) )
max_bias = ( max_bias + 5 ) - ( max_bias % 5 ) # round to 5
out_fh.write( '## max bias is {0}\n'.format( max_bias ) )
#conv = matplotlib.cm.RdYlGn
#conv = matplotlib.cm.winter
conv = matplotlib.colors.LinearSegmentedColormap.from_list(name='gor', colors =['green', 'orange', 'red'], N=16)
# index of data
#mid_i = fs.index('mid')
#rnm_i = fs.index('Reference not covered')
#rl_i = fs.index('Reference Length')
out_fh.write( 'labelCol <- function(x) {\n' )
out_fh.write( 'if (is.leaf(x)) {\n' )
out_fh.write( '## fetch label\n' )
out_fh.write( 'label <- attr(x, "label") \n' )
out_fh.write( 'attr(x, "nodePar") <- list(cex=0.5, lab.cex = 0.6, pch=15, bg="#ff0000")\n' )
out_fh.write( '## set label color to red for A and B, to blue otherwise\n' )
for record in records:
name = record['n']
# data value
#bias = float(fs[mid_i])
bias = record['m']
#new_name = re.sub( '(Escherichia coli|strain|whole genome shotgun sequence|complete sequence|DNA|str\\.|complete genome|chromosome|,|\'|E2348/69 )', '', name ).strip()
#new_name = re.sub( ' *', ' ', new_name )
color = matplotlib.colors.rgb2hex( conv( bias / max_bias ) )
out_fh.write( 'if (label == "%s") { attr(x, "nodePar") <- list(cex=1.0, lab.cex = 0.6, pch = 19, lab.col="%s", bg="%s", col="%s") }\n' % ( name, color, color, color ) )
out_fh.write( '}\n' )
out_fh.write( 'return(x)\n' )
out_fh.write( '}\n' )
# write out clustering code
out_fh.write( 'distances = read.csv(file = \'{0}\', header = T, row.names= 1)\n'.format( distances ) )
out_fh.write( 'hc <- hclust(as.dist(distances))\n' )
out_fh.write( 'd <- dendrapply(as.dendrogram(hc, hang=0.1), labelCol)\n' )
out_fh.write( 'plot(d, horiz=F)\n' )
# draw scale
fig = plt.figure()
ax = fig.add_subplot(111)
a = np.linspace(0, 1, 256).reshape(1,-1)
a = np.vstack((a,a))
ax.imshow(a, aspect='auto', cmap=conv)
fig.savefig('gradient.pdf', format='pdf', dpi=1000)
if __name__ == '__main__':
info, records = parse( sys.argv[1], sys.argv[2], sys.argv[3:] )
make_colour_code( info, sys.argv[2], records, sys.stdout )
|
[
"numpy.linspace",
"matplotlib.pyplot.figure",
"numpy.vstack"
] |
[((3595, 3607), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3605, 3607), True, 'import matplotlib.pyplot as plt\n'), ((3688, 3705), 'numpy.vstack', 'np.vstack', (['(a, a)'], {}), '((a, a))\n', (3697, 3705), True, 'import numpy as np\n'), ((3645, 3667), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(256)'], {}), '(0, 1, 256)\n', (3656, 3667), True, 'import numpy as np\n')]
|
"""Module containing the decoders."""
import numpy as np
import torch
import torch.nn as nn
import pdb
class DecoderBurgess(nn.Module):
def __init__(self, img_size,
latent_dim=10):
r"""Decoder of the model proposed in [1].
Parameters
----------
img_size : tuple of ints
Size of images. E.g. (1, 32, 32) or (3, 64, 64).
latent_dim : int
Dimensionality of latent output.
Model Architecture (transposed for decoder)
------------
- 4 convolutional layers (each with 32 channels), (4 x 4 kernel),
(stride of 2)
- 2 fully connected layers (each of 256 units)
- Latent distribution:
- 1 fully connected layer of 20 units (log variance and mean for
10 Gaussians)
References:
[1] Burgess, <NAME>., et al. "Understanding disentangling in
$\beta$-VAE." arXiv preprint arXiv:1804.03599 (2018).
"""
super(DecoderBurgess, self).__init__()
# Layer parameters
hid_channels = 32
kernel_size = 4
hidden_dim = 256
self.img_size = img_size
# Shape required to start transpose convs
self.reshape = (hid_channels, kernel_size, kernel_size)
n_chan = self.img_size[0]
self.img_size = img_size
# Fully connected layers
self.lin1 = nn.Linear(latent_dim, hidden_dim)
self.lin2 = nn.Linear(hidden_dim, hidden_dim)
self.lin3 = nn.Linear(hidden_dim, np.product(self.reshape))
# Convolutional layers
cnn_kwargs = dict(stride=2, padding=1)
# If input image is 64x64 do fourth convolution
if self.img_size[1] == self.img_size[2] == 64:
self.convT_64 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT1 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT2 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT3 = nn.ConvTranspose2d(
hid_channels, n_chan, kernel_size, **cnn_kwargs)
def forward(self, z):
batch_size = z.size(0)
# Fully connected layers with ReLu activations
x = torch.relu(self.lin1(z))
x = torch.relu(self.lin2(x))
x = torch.relu(self.lin3(x))
x = x.view(batch_size, *self.reshape)
# Convolutional layers with ReLu activations
if self.img_size[1] == self.img_size[2] == 64:
x = torch.relu(self.convT_64(x))
x = torch.relu(self.convT1(x))
x = torch.relu(self.convT2(x))
# Sigmoid activation for final conv layer
x = torch.sigmoid(self.convT3(x))
return x
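# A minimal usage sketch (shapes assumed from the constructor arguments, not from the paper):
#   decoder = DecoderBurgess(img_size=(3, 64, 64), latent_dim=10)
#   z = torch.randn(16, 10)
#   x_hat = decoder(z)   # expected shape: (16, 3, 64, 64)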
class DecoderRezendeViola(nn.Module):
def __init__(self, img_size,
latent_dim=10):
r"""Decoder of the model used in [1].
Parameters
----------
img_size : tuple of ints
Size of images. E.g. (1, 32, 32) or (3, 64, 64).
latent_dim : int
Dimensionality of latent output.
Model Architecture (transposed for decoder)
------------
- 4 convolutional layers (each with 32 channels), (4 x 4 kernel),
(stride of 2)
- 2 fully connected layers (each of 256 units)
- Latent distribution:
- 1 fully connected layer of 20 units (log variance and mean for
10 Gaussians)
References:
[1] <NAME> Rezende and <NAME>. <NAME>, 2018.
"""
super(DecoderRezendeViola, self).__init__()
# Layer parameters
hid_channels = 32
kernel_size = 4
hidden_dim = 256
self.img_size = img_size
# Shape required to start transpose convs
self.reshape = (hid_channels, kernel_size, kernel_size)
n_chan = self.img_size[0]
self.img_size = img_size
# Fully connected layers
self.lin1 = nn.Linear(latent_dim, hidden_dim)
self.lin2 = nn.Linear(hidden_dim, hidden_dim)
self.lin3 = nn.Linear(hidden_dim, np.product(self.reshape))
# Convolutional layers
cnn_kwargs = dict(stride=2, padding=1)
# If input image is 64x64 do fourth convolution
if self.img_size[1] == self.img_size[2] == 64:
self.convT_64 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT1 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT2 = nn.ConvTranspose2d(
hid_channels, hid_channels, kernel_size, **cnn_kwargs)
self.convT3 = nn.ConvTranspose2d(
hid_channels, 2 * n_chan, kernel_size, **cnn_kwargs)
def reparameterize(self, mean, logvar):
"""
Samples from a normal distribution using the reparameterization trick.
Parameters
----------
mean : torch.Tensor
Mean of the normal distribution. Shape (batch_size, latent_dim)
logvar : torch.Tensor
Diagonal log variance of the normal distribution.
Shape : (batch_size, latent_dim)
"""
if self.training:
std = torch.exp(0.5 * logvar)
# std = torch.zeros_like(mean) + 0.25
eps = torch.randn_like(std)
return mean + std * eps
else:
return mean
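    # Reparameterization in brief: during training z = mean + exp(0.5 * logvar) * eps with
    # eps ~ N(0, I), which keeps sampling differentiable w.r.t. mean and logvar; in eval
    # mode the mean is returned deterministically.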
def forward(self, z):
batch_size = z.size(0)
# Fully connected layers with ReLu activations
x = torch.relu(self.lin1(z))
x = torch.relu(self.lin2(x))
x = torch.relu(self.lin3(x))
x = x.view(batch_size, *self.reshape)
# Convolutional layers with ReLu activations
if self.img_size[1] == self.img_size[2] == 64:
x = torch.relu(self.convT_64(x))
x = torch.relu(self.convT1(x))
x = torch.relu(self.convT2(x))
# Sigmoid activation for final conv layer
x = torch.sigmoid(self.convT3(x))
out = self.reparameterize(x[:,0,:,:].view(-1, self.img_size[0],self.img_size[1], self.img_size[2]), x[:,1,:,:].view(-1, self.img_size[0],self.img_size[1], self.img_size[2]))
return out
class IntegrationDecoderCNCVAE(nn.Module):
def __init__(self, data_size, latent_dim=16, dense_units=128):
r"""Encoder of the concatanation VAE [1].
Parameters
----------
data_size : int
Dimensionality of the input data
dense_units : int
Number of units for the dense layer
latent_dim : int
Dimensionality of latent output.
Model Architecture (transposed for decoder)
------------
- 1 fully connected layer with units defined by dense_units
- Latent distribution:
- 1 fully connected layer of latent_dim units (log variance and mean for
10 Gaussians)
References:
[1] Simidjievski, Nikola et al. “Variational Autoencoders for Cancer
Data Integration: Design Principles and Computational Practice.”
Frontiers in genetics vol. 10 1205. 11 Dec. 2019,
doi:10.3389/fgene.2019.01205
"""
super(IntegrationDecoderCNCVAE, self).__init__()
self.data_size = data_size
self.dense_units = dense_units
self.latent_dim = latent_dim
# define decoding layers
self.de_embed = nn.Linear(self.latent_dim, self.dense_units)
        self.decode = nn.Linear(self.dense_units, self.data_size)  # map the hidden layer back to the input dimensionality
def forward(self, z):
hidden = self.de_embed(z)
x = self.decode(hidden)
return x
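# A hedged usage sketch (sizes are illustrative; assumes decode maps back to data_size as above):
#   dec = IntegrationDecoderCNCVAE(data_size=2000, latent_dim=16, dense_units=128)
#   z = torch.randn(8, 16)
#   x_hat = dec(z)   # expected shape: (8, 2000)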
|
[
"torch.nn.ConvTranspose2d",
"torch.randn_like",
"torch.exp",
"numpy.product",
"torch.nn.Linear"
] |
[((1408, 1441), 'torch.nn.Linear', 'nn.Linear', (['latent_dim', 'hidden_dim'], {}), '(latent_dim, hidden_dim)\n', (1417, 1441), True, 'import torch.nn as nn\n'), ((1462, 1495), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'hidden_dim'], {}), '(hidden_dim, hidden_dim)\n', (1471, 1495), True, 'import torch.nn as nn\n'), ((1896, 1969), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (1914, 1969), True, 'import torch.nn as nn\n'), ((2005, 2078), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (2023, 2078), True, 'import torch.nn as nn\n'), ((2114, 2181), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'n_chan', 'kernel_size'], {}), '(hid_channels, n_chan, kernel_size, **cnn_kwargs)\n', (2132, 2181), True, 'import torch.nn as nn\n'), ((4045, 4078), 'torch.nn.Linear', 'nn.Linear', (['latent_dim', 'hidden_dim'], {}), '(latent_dim, hidden_dim)\n', (4054, 4078), True, 'import torch.nn as nn\n'), ((4099, 4132), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'hidden_dim'], {}), '(hidden_dim, hidden_dim)\n', (4108, 4132), True, 'import torch.nn as nn\n'), ((4533, 4606), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (4551, 4606), True, 'import torch.nn as nn\n'), ((4642, 4715), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (4660, 4715), True, 'import torch.nn as nn\n'), ((4751, 4822), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', '(2 * n_chan)', 'kernel_size'], {}), '(hid_channels, 2 * n_chan, kernel_size, **cnn_kwargs)\n', (4769, 4822), True, 'import torch.nn as nn\n'), ((7561, 7605), 'torch.nn.Linear', 'nn.Linear', (['self.latent_dim', 'self.dense_units'], {}), '(self.latent_dim, self.dense_units)\n', (7570, 7605), True, 'import torch.nn as nn\n'), ((7628, 7677), 'torch.nn.Linear', 'nn.Linear', (['self.dense_units', 'self.self.latent_dim'], {}), '(self.dense_units, self.self.latent_dim)\n', (7637, 7677), True, 'import torch.nn as nn\n'), ((1538, 1562), 'numpy.product', 'np.product', (['self.reshape'], {}), '(self.reshape)\n', (1548, 1562), True, 'import numpy as np\n'), ((1782, 1855), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (1800, 1855), True, 'import torch.nn as nn\n'), ((4175, 4199), 'numpy.product', 'np.product', (['self.reshape'], {}), '(self.reshape)\n', (4185, 4199), True, 'import numpy as np\n'), ((4419, 4492), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['hid_channels', 'hid_channels', 'kernel_size'], {}), '(hid_channels, hid_channels, kernel_size, **cnn_kwargs)\n', (4437, 4492), True, 'import torch.nn as nn\n'), ((5317, 5340), 'torch.exp', 'torch.exp', (['(0.5 * logvar)'], {}), '(0.5 * logvar)\n', (5326, 5340), False, 'import torch\n'), ((5409, 5430), 'torch.randn_like', 'torch.randn_like', (['std'], {}), '(std)\n', (5425, 5430), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import numpy as np
import sklearn.neighbors as sn
import matplotlib.pyplot as mp
train_x, train_y = [], []
with open('../../data/knn.txt', 'r') as f:
for line in f.readlines():
data = [float(substr) for substr
in line.split(',')]
train_x.append(data[:-1])
train_y.append(data[-1])
train_x = np.array(train_x)
train_y = np.array(train_y, dtype=int)
model = sn.KNeighborsClassifier(
n_neighbors=10, weights='distance')
model.fit(train_x, train_y)
l, r, h = train_x[:, 0].min() - 1, \
train_x[:, 0].max() + 1, 0.005
b, t, v = train_x[:, 1].min() - 1, \
train_x[:, 1].max() + 1, 0.005
grid_x = np.meshgrid(np.arange(l, r, h),
np.arange(b, t, v))
flat_x = np.c_[grid_x[0].ravel(), grid_x[1].ravel()]
flat_y = model.predict(flat_x)
grid_y = flat_y.reshape(grid_x[0].shape)
test_x = np.array([
[2.2, 6.2],
[3.6, 1.8],
[4.5, 3.6]])
pred_test_y = model.predict(test_x)
_, nn_indices = model.kneighbors(test_x)
mp.figure('KNN Classification', facecolor='lightgray')
mp.title('KNN Classification', fontsize=20)
mp.xlabel('x', fontsize=14)
mp.ylabel('y', fontsize=14)
mp.tick_params(labelsize=10)
mp.pcolormesh(grid_x[0], grid_x[1], grid_y, cmap='gray')
classes = np.unique(train_y)
classes.sort()
cs = mp.get_cmap('brg', len(classes))(classes)
mp.scatter(train_x[:, 0], train_x[:, 1],
c=cs[train_y], s=60)
mp.scatter(test_x[:, 0], test_x[:, 1], marker='D',
c=cs[pred_test_y], s=60)
for nn_index, y in zip(nn_indices, pred_test_y):
mp.scatter(
train_x[nn_index, 0], train_x[nn_index, 1],
marker='D',
edgecolor=cs[np.ones_like(nn_index) * y],
facecolor='none', s=180)
mp.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"numpy.ones_like",
"matplotlib.pyplot.scatter",
"sklearn.neighbors.KNeighborsClassifier",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.arange",
"matplotlib.pyplot.pcolormesh",
"matplotlib.pyplot.tick_params",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"numpy.unique"
] |
[((399, 416), 'numpy.array', 'np.array', (['train_x'], {}), '(train_x)\n', (407, 416), True, 'import numpy as np\n'), ((427, 455), 'numpy.array', 'np.array', (['train_y'], {'dtype': 'int'}), '(train_y, dtype=int)\n', (435, 455), True, 'import numpy as np\n'), ((464, 523), 'sklearn.neighbors.KNeighborsClassifier', 'sn.KNeighborsClassifier', ([], {'n_neighbors': '(10)', 'weights': '"""distance"""'}), "(n_neighbors=10, weights='distance')\n", (487, 523), True, 'import sklearn.neighbors as sn\n'), ((917, 963), 'numpy.array', 'np.array', (['[[2.2, 6.2], [3.6, 1.8], [4.5, 3.6]]'], {}), '([[2.2, 6.2], [3.6, 1.8], [4.5, 3.6]])\n', (925, 963), True, 'import numpy as np\n'), ((1054, 1108), 'matplotlib.pyplot.figure', 'mp.figure', (['"""KNN Classification"""'], {'facecolor': '"""lightgray"""'}), "('KNN Classification', facecolor='lightgray')\n", (1063, 1108), True, 'import matplotlib.pyplot as mp\n'), ((1109, 1152), 'matplotlib.pyplot.title', 'mp.title', (['"""KNN Classification"""'], {'fontsize': '(20)'}), "('KNN Classification', fontsize=20)\n", (1117, 1152), True, 'import matplotlib.pyplot as mp\n'), ((1153, 1180), 'matplotlib.pyplot.xlabel', 'mp.xlabel', (['"""x"""'], {'fontsize': '(14)'}), "('x', fontsize=14)\n", (1162, 1180), True, 'import matplotlib.pyplot as mp\n'), ((1181, 1208), 'matplotlib.pyplot.ylabel', 'mp.ylabel', (['"""y"""'], {'fontsize': '(14)'}), "('y', fontsize=14)\n", (1190, 1208), True, 'import matplotlib.pyplot as mp\n'), ((1209, 1237), 'matplotlib.pyplot.tick_params', 'mp.tick_params', ([], {'labelsize': '(10)'}), '(labelsize=10)\n', (1223, 1237), True, 'import matplotlib.pyplot as mp\n'), ((1238, 1294), 'matplotlib.pyplot.pcolormesh', 'mp.pcolormesh', (['grid_x[0]', 'grid_x[1]', 'grid_y'], {'cmap': '"""gray"""'}), "(grid_x[0], grid_x[1], grid_y, cmap='gray')\n", (1251, 1294), True, 'import matplotlib.pyplot as mp\n'), ((1305, 1323), 'numpy.unique', 'np.unique', (['train_y'], {}), '(train_y)\n', (1314, 1323), True, 'import numpy as np\n'), ((1386, 1447), 'matplotlib.pyplot.scatter', 'mp.scatter', (['train_x[:, 0]', 'train_x[:, 1]'], {'c': 'cs[train_y]', 's': '(60)'}), '(train_x[:, 0], train_x[:, 1], c=cs[train_y], s=60)\n', (1396, 1447), True, 'import matplotlib.pyplot as mp\n'), ((1459, 1534), 'matplotlib.pyplot.scatter', 'mp.scatter', (['test_x[:, 0]', 'test_x[:, 1]'], {'marker': '"""D"""', 'c': 'cs[pred_test_y]', 's': '(60)'}), "(test_x[:, 0], test_x[:, 1], marker='D', c=cs[pred_test_y], s=60)\n", (1469, 1534), True, 'import matplotlib.pyplot as mp\n'), ((1766, 1775), 'matplotlib.pyplot.show', 'mp.show', ([], {}), '()\n', (1773, 1775), True, 'import matplotlib.pyplot as mp\n'), ((722, 740), 'numpy.arange', 'np.arange', (['l', 'r', 'h'], {}), '(l, r, h)\n', (731, 740), True, 'import numpy as np\n'), ((763, 781), 'numpy.arange', 'np.arange', (['b', 't', 'v'], {}), '(b, t, v)\n', (772, 781), True, 'import numpy as np\n'), ((1704, 1726), 'numpy.ones_like', 'np.ones_like', (['nn_index'], {}), '(nn_index)\n', (1716, 1726), True, 'import numpy as np\n')]
|
"""
Original code based on Kaggle competition
Modified to take 3-channel input
"""
from __future__ import division
import numpy as np
from keras.models import Model
from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D
from keras import backend as K
import keras
import h5py
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Nadam
from keras.callbacks import History
import pandas as pd
from keras.backend import binary_crossentropy
import datetime
import os
import random
import threading
import tensorflow as tf
from keras.models import model_from_json
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.9
set_session(tf.Session(config=config))
img_rows = 112
img_cols = 112
smooth = 1e-12
num_channels = 3
num_mask_channels = 1
random.seed(0)
def jaccard_coef(y_true, y_pred):
intersection = K.sum(y_true * y_pred, axis=[0, -1, -2])
sum_ = K.sum(y_true + y_pred, axis=[0, -1, -2])
jac = (intersection + smooth) / (sum_ - intersection + smooth)
return K.mean(jac)
def jaccard_coef_int(y_true, y_pred):
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
intersection = K.sum(y_true * y_pred_pos, axis=[0, -1, -2])
sum_ = K.sum(y_true + y_pred_pos, axis=[0, -1, -2])
jac = (intersection + smooth) / (sum_ - intersection + smooth)
return K.mean(jac)
def jaccard_coef_loss(y_true, y_pred):
return -K.log(jaccard_coef(y_true, y_pred)) + binary_crossentropy(y_pred, y_true)
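# jaccard_coef_loss combines pixel-wise binary cross-entropy with -log of the soft Jaccard
# index, so minimising it rewards both per-pixel accuracy and overlap with the target mask.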
def get_unet0():
inputs = Input((num_channels, img_rows, img_cols))
conv1a = BatchNormalization(mode=0, axis=1)(inputs)
conv1a = Convolution2D(12, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv1a)
conv1a = keras.layers.advanced_activations.ELU()(conv1a)
conv1b = BatchNormalization(mode=0, axis=1)(conv1a)
conv1b = Convolution2D(12, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv1b)
conv1b = keras.layers.advanced_activations.ELU()(conv1b)
conc1 = merge([conv1a,conv1b], mode = 'concat', concat_axis = 1)
pool1 = keras.layers.AveragePooling2D(pool_size=(2, 2),dim_ordering='th')(conc1)
conv2a = BatchNormalization(mode=0, axis=1)(pool1)
conv2a = Convolution2D(24, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv2a)
conv2a = keras.layers.advanced_activations.ELU()(conv2a)
conv2b = BatchNormalization(mode=0, axis=1)(conv2a)
conv2b = Convolution2D(24, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv2b)
conc2 = merge([conv2a,conv2b], mode = 'concat', concat_axis = 1)
pool2 = Convolution2D(24,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc2)
pool2 = AveragePooling2D(pool_size=(2, 2),dim_ordering='th')(conc2)
conv3a = BatchNormalization(mode=0, axis=1)(pool2)
conv3a = Convolution2D(48, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv3a)
conv3a = keras.layers.advanced_activations.ELU()(conv3a)
conv3b = BatchNormalization(mode=0, axis=1)(conv3a)
conv3b = Convolution2D(48, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv3b)
conv3b = keras.layers.advanced_activations.ELU()(conv3b)
conc3 = merge([conv3a,conv3b], mode = 'concat', concat_axis = 1)
pool3 = Convolution2D(48,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc3)
pool3 = AveragePooling2D(pool_size=(2, 2),dim_ordering='th')(conc3)
conv4a = BatchNormalization(mode=0, axis=1)(pool3)
conv4a = Convolution2D(96, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv4a)
conv4a = keras.layers.advanced_activations.ELU()(conv4a)
conv4b = BatchNormalization(mode=0, axis=1)(conv4a)
conv4b = Convolution2D(96, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv4b)
conv4b = keras.layers.advanced_activations.ELU()(conv4b)
conc4 = merge([conv4a,conv4b], mode = 'concat', concat_axis = 1)
pool4 = Convolution2D(96,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc4)
pool4 = AveragePooling2D(pool_size=(2, 2),dim_ordering='th')(conv4b)
conv5a = BatchNormalization(mode=0, axis=1)(pool4)
conv5a = Convolution2D(192, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv5a)
conv5a = keras.layers.advanced_activations.ELU()(conv5a)
conv5b = BatchNormalization(mode=0, axis=1)(conv5a)
conv5b = Convolution2D(192, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv5b)
conv5b = keras.layers.advanced_activations.ELU()(conv5b)
conc5 = merge([conv5a,conv5b], mode = 'concat', concat_axis = 1)
up6 = merge([UpSampling2D(size=(2, 2),dim_ordering='th')(conc5), conc4], mode='concat', concat_axis=1)
conv6a = BatchNormalization(mode=0, axis=1)(up6)
conv6a = Convolution2D(96, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv6a)
conv6a = keras.layers.advanced_activations.ELU()(conv6a)
up6a = merge([up6, conv6a], mode = 'concat', concat_axis = 1)
conv6b = BatchNormalization(mode=0, axis=1)(up6a)
conv6b = Convolution2D(96, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv6b)
conv6b = keras.layers.advanced_activations.ELU()(conv6b)
conc6 = merge([up6a,conv6b], mode = 'concat', concat_axis = 1)
conc6 = Convolution2D(96,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc6)
up7 = merge([UpSampling2D(size=(2, 2),dim_ordering='th')(conc6), conc3], mode='concat', concat_axis=1)
conv7a = BatchNormalization(mode=0, axis=1)(up7)
conv7a = Convolution2D(48, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv7a)
conv7a = keras.layers.advanced_activations.ELU()(conv7a)
up7a = merge([up7, conv7a], mode = 'concat', concat_axis = 1)
conv7b = BatchNormalization(mode=0, axis=1)(up7a)
conv7b = Convolution2D(48, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv7b)
conv7b = keras.layers.advanced_activations.ELU()(conv7b)
conc7 = merge([up7a,conv7b], mode = 'concat', concat_axis = 1)
conc7 = Convolution2D(48,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc7)
up8 = merge([UpSampling2D(size=(2, 2),dim_ordering='th')(conc7), conc2], mode='concat', concat_axis=1)
conv8a = BatchNormalization(mode=0, axis=1)(up8)
conv8a = Convolution2D(24, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv8a)
conv8a = keras.layers.advanced_activations.ELU()(conv8a)
up8a = merge([up8, conv8a], mode = 'concat', concat_axis = 1)
conv8b = BatchNormalization(mode=0, axis=1)(up8a)
conv8b = Convolution2D(24, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv8b)
conv8b = keras.layers.advanced_activations.ELU()(conv8b)
conc8 = merge([up8a,conv8b], mode = 'concat', concat_axis = 1)
conc8 = Convolution2D(24,1,1,border_mode='same',init='he_uniform',dim_ordering='th')(conc8)
up9 = merge([UpSampling2D(size=(2, 2),dim_ordering='th')(conc8), conc1], mode='concat', concat_axis=1)
conv9a = BatchNormalization(mode=0, axis=1)(up9)
conv9a = Convolution2D(12, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv9a)
conv9a = keras.layers.advanced_activations.ELU()(conv9a)
up9a = merge([up9, conv9a], mode = 'concat', concat_axis = 1)
conv9b = BatchNormalization(mode=0, axis=1)(up9a)
conv9b = Convolution2D(12, 3, 3, border_mode='same', init='he_uniform',dim_ordering='th')(conv9b)
conv9b = keras.layers.advanced_activations.ELU()(conv9b)
conc9 = merge([up9a,conv9b], mode = 'concat', concat_axis = 1)
crop9 = Cropping2D(cropping=((16, 16), (16, 16)),dim_ordering='th')(conc9)
conv9 = BatchNormalization(mode=0, axis=1)(crop9)
conv9 = Convolution2D(12, 3, 3, border_mode='same',init='he_uniform',dim_ordering='th')(conv9)
conv9 = keras.layers.advanced_activations.ELU()(conv9)
conv10 = Convolution2D(num_mask_channels, 1, 1, activation='sigmoid',dim_ordering='th')(conv9)
model = Model(input=inputs, output=conv10)
return model
def flip_axis(x, axis):
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
def form_batch(X, y, batch_size):
X_batch = np.zeros((batch_size, num_channels, img_rows, img_cols))
y_batch = np.zeros((batch_size, num_mask_channels, img_rows, img_cols))
X_height = X.shape[2]
X_width = X.shape[3]
for i in range(batch_size):
random_width = random.randint(0, X_width - img_cols - 1)
random_height = random.randint(0, X_height - img_rows - 1)
random_image = random.randint(0, X.shape[0] - 1)
y_batch[i] = y[random_image, :, random_height: random_height + img_rows, random_width: random_width + img_cols]
X_batch[i] = np.array(X[random_image, :, random_height: random_height + img_rows, random_width: random_width + img_cols])
return X_batch, y_batch
class threadsafe_iter:
"""Takes an iterator/generator and makes it thread-safe by
serializing call to the `next` method of given iterator/generator.
"""
def __init__(self, it):
self.it = it
self.lock = threading.Lock()
def __iter__(self):
return self
def next(self):
with self.lock:
return self.it.next()
def threadsafe_generator(f):
"""A decorator that takes a generator function and makes it thread-safe.
"""
def g(*a, **kw):
        return threadsafe_iter(f(*a, **kw))
return g
def batch_generator(X, y, batch_size, horizontal_flip=False, vertical_flip=False, swap_axis=False):
while True:
X_batch, y_batch = form_batch(X, y, batch_size)
for i in range(X_batch.shape[0]):
xb = X_batch[i]
yb = y_batch[i]
if horizontal_flip:
if np.random.random() < 0.5:
xb = flip_axis(xb, 1)
yb = flip_axis(yb, 1)
if vertical_flip:
if np.random.random() < 0.5:
xb = flip_axis(xb, 2)
yb = flip_axis(yb, 2)
if swap_axis:
if np.random.random() < 0.5:
xb = xb.swapaxes(1, 2)
yb = yb.swapaxes(1, 2)
X_batch[i] = xb
y_batch[i] = yb
yield X_batch, y_batch[:, :, 16:16 + img_rows - 32, 16:16 + img_cols - 32]
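# A hedged usage sketch (array shapes assumed channels-first, matching form_batch above):
#   gen = batch_generator(X_train, y_train, batch_size=128, horizontal_flip=True)
#   X_b, y_b = next(gen)   # X_b: (128, 3, 112, 112), y_b: (128, 1, 80, 80) after the 16px crop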
def save_model(model, cross):
json_string = model.to_json()
if not os.path.isdir('cache'):
os.mkdir('cache')
json_name = 'architecture_densenet' + cross + '.json'
weight_name = 'model_weights_densenet' + cross + '.h5'
open(os.path.join('cache', json_name), 'w').write(json_string)
model.save_weights(os.path.join('cache', weight_name), overwrite=True)
def save_history(history, suffix):
filename = 'history/history_' + suffix + '.csv'
pd.DataFrame(history.history).to_csv(filename, index=False)
def read_model(cross=''):
json_name = 'architecture_densenet' + cross + '.json'
weight_name = 'model_weights_densenet' + cross + '.h5'
model = model_from_json(open(os.path.join('../src/cache', json_name)).read())
model.load_weights(os.path.join('../src/cache', weight_name))
return model
if __name__ == '__main__':
data_path = '../data'
now = datetime.datetime.now()
print('[{}] Creating and compiling model...'.format(str(datetime.datetime.now())))
model = get_unet0()
print('[{}] Reading train...'.format(str(datetime.datetime.now())))
f = h5py.File(os.path.join(data_path, 'train_coco.h5'), 'r')
X_train = f['train']
y_train = np.array(f['train_mask_coco'])[:, 0]
y_train = np.expand_dims(y_train, 1)
print(y_train.shape)
train_ids = np.array(f['train_ids'])
batch_size = 128
nb_epoch = 4
history = History()
callbacks = [
history,
]
suffix = 'buildings_3_densenet_orig_coco'+"{batch}_{epoch}".format(batch=batch_size,epoch=nb_epoch)
model.compile(optimizer=Nadam(lr=1e-3), loss=jaccard_coef_loss, metrics=['binary_crossentropy', jaccard_coef_int])
from tensorflow.python.client import device_lib
print(device_lib.list_local_devices())
model.fit_generator(batch_generator(X_train, y_train, batch_size, horizontal_flip=True, vertical_flip=True, swap_axis=True),
nb_epoch=nb_epoch,
verbose=1,
samples_per_epoch=batch_size * 25,
callbacks=callbacks,
nb_worker=24
)
save_model(model, "{batch}_{epoch}_{suffix}".format(batch=batch_size, epoch=nb_epoch, suffix=suffix))
save_history(history, suffix)
f.close()
|
[
"os.mkdir",
"keras.layers.Cropping2D",
"keras.models.Model",
"tensorflow.ConfigProto",
"keras.layers.Input",
"os.path.join",
"pandas.DataFrame",
"random.randint",
"threading.Lock",
"random.seed",
"datetime.datetime.now",
"keras.layers.normalization.BatchNormalization",
"keras.layers.Convolution2D",
"numpy.asarray",
"tensorflow.Session",
"keras.optimizers.Nadam",
"keras.layers.AveragePooling2D",
"keras.layers.UpSampling2D",
"keras.backend.clip",
"keras.callbacks.History",
"os.path.isdir",
"keras.backend.sum",
"numpy.zeros",
"numpy.expand_dims",
"tensorflow.python.client.device_lib.list_local_devices",
"numpy.random.random",
"keras.backend.mean",
"numpy.array",
"keras.backend.binary_crossentropy",
"keras.layers.advanced_activations.ELU",
"keras.layers.merge"
] |
[((758, 774), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (772, 774), True, 'import tensorflow as tf\n'), ((969, 983), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (980, 983), False, 'import random\n'), ((846, 871), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (856, 871), True, 'import tensorflow as tf\n'), ((1043, 1083), 'keras.backend.sum', 'K.sum', (['(y_true * y_pred)'], {'axis': '[0, -1, -2]'}), '(y_true * y_pred, axis=[0, -1, -2])\n', (1048, 1083), True, 'from keras import backend as K\n'), ((1096, 1136), 'keras.backend.sum', 'K.sum', (['(y_true + y_pred)'], {'axis': '[0, -1, -2]'}), '(y_true + y_pred, axis=[0, -1, -2])\n', (1101, 1136), True, 'from keras import backend as K\n'), ((1221, 1232), 'keras.backend.mean', 'K.mean', (['jac'], {}), '(jac)\n', (1227, 1232), True, 'from keras import backend as K\n'), ((1346, 1390), 'keras.backend.sum', 'K.sum', (['(y_true * y_pred_pos)'], {'axis': '[0, -1, -2]'}), '(y_true * y_pred_pos, axis=[0, -1, -2])\n', (1351, 1390), True, 'from keras import backend as K\n'), ((1403, 1447), 'keras.backend.sum', 'K.sum', (['(y_true + y_pred_pos)'], {'axis': '[0, -1, -2]'}), '(y_true + y_pred_pos, axis=[0, -1, -2])\n', (1408, 1447), True, 'from keras import backend as K\n'), ((1532, 1543), 'keras.backend.mean', 'K.mean', (['jac'], {}), '(jac)\n', (1538, 1543), True, 'from keras import backend as K\n'), ((1711, 1752), 'keras.layers.Input', 'Input', (['(num_channels, img_rows, img_cols)'], {}), '((num_channels, img_rows, img_cols))\n', (1716, 1752), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2210, 2263), 'keras.layers.merge', 'merge', (['[conv1a, conv1b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([conv1a, conv1b], mode='concat', concat_axis=1)\n", (2215, 2263), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2749, 2802), 'keras.layers.merge', 'merge', (['[conv2a, conv2b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([conv2a, conv2b], mode='concat', concat_axis=1)\n", (2754, 2802), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3434, 3487), 'keras.layers.merge', 'merge', (['[conv3a, conv3b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([conv3a, conv3b], mode='concat', concat_axis=1)\n", (3439, 3487), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4123, 4176), 'keras.layers.merge', 'merge', (['[conv4a, conv4b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([conv4a, conv4b], mode='concat', concat_axis=1)\n", (4128, 4176), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4815, 4868), 'keras.layers.merge', 'merge', (['[conv5a, conv5b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([conv5a, conv5b], mode='concat', concat_axis=1)\n", (4820, 4868), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5215, 5265), 'keras.layers.merge', 'merge', (['[up6, conv6a]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up6, conv6a], mode='concat', concat_axis=1)\n", (5220, 5265), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), 
((5503, 5554), 'keras.layers.merge', 'merge', (['[up6a, conv6b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up6a, conv6b], mode='concat', concat_axis=1)\n", (5508, 5554), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6002, 6052), 'keras.layers.merge', 'merge', (['[up7, conv7a]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up7, conv7a], mode='concat', concat_axis=1)\n", (6007, 6052), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6290, 6341), 'keras.layers.merge', 'merge', (['[up7a, conv7b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up7a, conv7b], mode='concat', concat_axis=1)\n", (6295, 6341), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6785, 6835), 'keras.layers.merge', 'merge', (['[up8, conv8a]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up8, conv8a], mode='concat', concat_axis=1)\n", (6790, 6835), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7073, 7124), 'keras.layers.merge', 'merge', (['[up8a, conv8b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up8a, conv8b], mode='concat', concat_axis=1)\n", (7078, 7124), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7568, 7618), 'keras.layers.merge', 'merge', (['[up9, conv9a]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up9, conv9a], mode='concat', concat_axis=1)\n", (7573, 7618), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7856, 7907), 'keras.layers.merge', 'merge', (['[up9a, conv9b]'], {'mode': '"""concat"""', 'concat_axis': '(1)'}), "([up9a, conv9b], mode='concat', concat_axis=1)\n", (7861, 7907), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((8333, 8367), 'keras.models.Model', 'Model', ([], {'input': 'inputs', 'output': 'conv10'}), '(input=inputs, output=conv10)\n', (8338, 8367), False, 'from keras.models import Model\n'), ((8577, 8633), 'numpy.zeros', 'np.zeros', (['(batch_size, num_channels, img_rows, img_cols)'], {}), '((batch_size, num_channels, img_rows, img_cols))\n', (8585, 8633), True, 'import numpy as np\n'), ((8649, 8710), 'numpy.zeros', 'np.zeros', (['(batch_size, num_mask_channels, img_rows, img_cols)'], {}), '((batch_size, num_mask_channels, img_rows, img_cols))\n', (8657, 8710), True, 'import numpy as np\n'), ((11735, 11758), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11756, 11758), False, 'import datetime\n'), ((12114, 12140), 'numpy.expand_dims', 'np.expand_dims', (['y_train', '(1)'], {}), '(y_train, 1)\n', (12128, 12140), True, 'import numpy as np\n'), ((12186, 12210), 'numpy.array', 'np.array', (["f['train_ids']"], {}), "(f['train_ids'])\n", (12194, 12210), True, 'import numpy as np\n'), ((12270, 12279), 'keras.callbacks.History', 'History', ([], {}), '()\n', (12277, 12279), False, 'from keras.callbacks import History\n'), ((1302, 1322), 'keras.backend.clip', 'K.clip', (['y_pred', '(0)', '(1)'], {}), '(y_pred, 0, 1)\n', (1308, 1322), True, 'from keras import backend as K\n'), ((1639, 1674), 'keras.backend.binary_crossentropy', 'binary_crossentropy', (['y_pred', 'y_true'], {}), '(y_pred, 
y_true)\n', (1658, 1674), False, 'from keras.backend import binary_crossentropy\n'), ((1767, 1801), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (1785, 1801), False, 'from keras.layers.normalization import BatchNormalization\n'), ((1824, 1910), 'keras.layers.Convolution2D', 'Convolution2D', (['(12)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(12, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (1837, 1910), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((1927, 1966), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (1964, 1966), False, 'import keras\n'), ((1989, 2023), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (2007, 2023), False, 'from keras.layers.normalization import BatchNormalization\n'), ((2046, 2132), 'keras.layers.Convolution2D', 'Convolution2D', (['(12)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(12, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (2059, 2132), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2149, 2188), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (2186, 2188), False, 'import keras\n'), ((2280, 2346), 'keras.layers.AveragePooling2D', 'keras.layers.AveragePooling2D', ([], {'pool_size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(pool_size=(2, 2), dim_ordering='th')\n", (2309, 2346), False, 'import keras\n'), ((2369, 2403), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (2387, 2403), False, 'from keras.layers.normalization import BatchNormalization\n'), ((2425, 2511), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (2438, 2511), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2528, 2567), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (2565, 2567), False, 'import keras\n'), ((2590, 2624), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (2608, 2624), False, 'from keras.layers.normalization import BatchNormalization\n'), ((2647, 2733), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (2660, 2733), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2819, 2905), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (2832, 2905), False, 'from keras.layers 
import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2916, 2969), 'keras.layers.AveragePooling2D', 'AveragePooling2D', ([], {'pool_size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(pool_size=(2, 2), dim_ordering='th')\n", (2932, 2969), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((2992, 3026), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (3010, 3026), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3048, 3134), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (3061, 3134), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3151, 3190), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (3188, 3190), False, 'import keras\n'), ((3213, 3247), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (3231, 3247), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3270, 3356), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (3283, 3356), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3373, 3412), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (3410, 3412), False, 'import keras\n'), ((3504, 3590), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (3517, 3590), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3601, 3654), 'keras.layers.AveragePooling2D', 'AveragePooling2D', ([], {'pool_size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(pool_size=(2, 2), dim_ordering='th')\n", (3617, 3654), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3681, 3715), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (3699, 3715), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3737, 3823), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (3750, 3823), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((3840, 3879), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (3877, 3879), False, 'import keras\n'), ((3902, 3936), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': 
'(1)'}), '(mode=0, axis=1)\n', (3920, 3936), False, 'from keras.layers.normalization import BatchNormalization\n'), ((3959, 4045), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (3972, 4045), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4062, 4101), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (4099, 4101), False, 'import keras\n'), ((4193, 4279), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (4206, 4279), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4290, 4343), 'keras.layers.AveragePooling2D', 'AveragePooling2D', ([], {'pool_size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(pool_size=(2, 2), dim_ordering='th')\n", (4306, 4343), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4371, 4405), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (4389, 4405), False, 'from keras.layers.normalization import BatchNormalization\n'), ((4427, 4513), 'keras.layers.Convolution2D', 'Convolution2D', (['(192)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(192, 3, 3, border_mode='same', init='he_uniform',\n dim_ordering='th')\n", (4440, 4513), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4531, 4570), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (4568, 4570), False, 'import keras\n'), ((4593, 4627), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (4611, 4627), False, 'from keras.layers.normalization import BatchNormalization\n'), ((4650, 4736), 'keras.layers.Convolution2D', 'Convolution2D', (['(192)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(192, 3, 3, border_mode='same', init='he_uniform',\n dim_ordering='th')\n", (4663, 4736), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((4754, 4793), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (4791, 4793), False, 'import keras\n'), ((4998, 5032), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (5016, 5032), False, 'from keras.layers.normalization import BatchNormalization\n'), ((5052, 5138), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (5065, 5138), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5155, 5194), 
'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (5192, 5194), False, 'import keras\n'), ((5284, 5318), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (5302, 5318), False, 'from keras.layers.normalization import BatchNormalization\n'), ((5339, 5425), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (5352, 5425), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5442, 5481), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (5479, 5481), False, 'import keras\n'), ((5571, 5657), 'keras.layers.Convolution2D', 'Convolution2D', (['(96)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(96, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (5584, 5657), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5785, 5819), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (5803, 5819), False, 'from keras.layers.normalization import BatchNormalization\n'), ((5839, 5925), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (5852, 5925), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5942, 5981), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (5979, 5981), False, 'import keras\n'), ((6071, 6105), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (6089, 6105), False, 'from keras.layers.normalization import BatchNormalization\n'), ((6126, 6212), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (6139, 6212), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6229, 6268), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (6266, 6268), False, 'import keras\n'), ((6358, 6444), 'keras.layers.Convolution2D', 'Convolution2D', (['(48)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(48, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (6371, 6444), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6568, 6602), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (6586, 6602), False, 'from keras.layers.normalization import BatchNormalization\n'), ((6622, 6708), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', 
'(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (6635, 6708), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6725, 6764), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (6762, 6764), False, 'import keras\n'), ((6854, 6888), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (6872, 6888), False, 'from keras.layers.normalization import BatchNormalization\n'), ((6909, 6995), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (6922, 6995), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7012, 7051), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (7049, 7051), False, 'import keras\n'), ((7141, 7227), 'keras.layers.Convolution2D', 'Convolution2D', (['(24)', '(1)', '(1)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(24, 1, 1, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (7154, 7227), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7351, 7385), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (7369, 7385), False, 'from keras.layers.normalization import BatchNormalization\n'), ((7405, 7491), 'keras.layers.Convolution2D', 'Convolution2D', (['(12)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(12, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (7418, 7491), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7508, 7547), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (7545, 7547), False, 'import keras\n'), ((7637, 7671), 'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (7655, 7671), False, 'from keras.layers.normalization import BatchNormalization\n'), ((7692, 7778), 'keras.layers.Convolution2D', 'Convolution2D', (['(12)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(12, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (7705, 7778), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7795, 7834), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (7832, 7834), False, 'import keras\n'), ((7930, 7990), 'keras.layers.Cropping2D', 'Cropping2D', ([], {'cropping': '((16, 16), (16, 16))', 'dim_ordering': '"""th"""'}), "(cropping=((16, 16), (16, 16)), dim_ordering='th')\n", (7940, 7990), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((8010, 8044), 
'keras.layers.normalization.BatchNormalization', 'BatchNormalization', ([], {'mode': '(0)', 'axis': '(1)'}), '(mode=0, axis=1)\n', (8028, 8044), False, 'from keras.layers.normalization import BatchNormalization\n'), ((8065, 8151), 'keras.layers.Convolution2D', 'Convolution2D', (['(12)', '(3)', '(3)'], {'border_mode': '"""same"""', 'init': '"""he_uniform"""', 'dim_ordering': '"""th"""'}), "(12, 3, 3, border_mode='same', init='he_uniform', dim_ordering\n ='th')\n", (8078, 8151), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((8165, 8204), 'keras.layers.advanced_activations.ELU', 'keras.layers.advanced_activations.ELU', ([], {}), '()\n', (8202, 8204), False, 'import keras\n'), ((8232, 8311), 'keras.layers.Convolution2D', 'Convolution2D', (['num_mask_channels', '(1)', '(1)'], {'activation': '"""sigmoid"""', 'dim_ordering': '"""th"""'}), "(num_mask_channels, 1, 1, activation='sigmoid', dim_ordering='th')\n", (8245, 8311), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((8823, 8864), 'random.randint', 'random.randint', (['(0)', '(X_width - img_cols - 1)'], {}), '(0, X_width - img_cols - 1)\n', (8837, 8864), False, 'import random\n'), ((8890, 8932), 'random.randint', 'random.randint', (['(0)', '(X_height - img_rows - 1)'], {}), '(0, X_height - img_rows - 1)\n', (8904, 8932), False, 'import random\n'), ((8959, 8992), 'random.randint', 'random.randint', (['(0)', '(X.shape[0] - 1)'], {}), '(0, X.shape[0] - 1)\n', (8973, 8992), False, 'import random\n'), ((9138, 9248), 'numpy.array', 'np.array', (['X[random_image, :, random_height:random_height + img_rows, random_width:\n random_width + img_cols]'], {}), '(X[random_image, :, random_height:random_height + img_rows,\n random_width:random_width + img_cols])\n', (9146, 9248), True, 'import numpy as np\n'), ((9521, 9537), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (9535, 9537), False, 'import threading\n'), ((10875, 10897), 'os.path.isdir', 'os.path.isdir', (['"""cache"""'], {}), "('cache')\n", (10888, 10897), False, 'import os\n'), ((10908, 10925), 'os.mkdir', 'os.mkdir', (['"""cache"""'], {}), "('cache')\n", (10916, 10925), False, 'import os\n'), ((11137, 11171), 'os.path.join', 'os.path.join', (['"""cache"""', 'weight_name'], {}), "('cache', weight_name)\n", (11149, 11171), False, 'import os\n'), ((11604, 11645), 'os.path.join', 'os.path.join', (['"""../src/cache"""', 'weight_name'], {}), "('../src/cache', weight_name)\n", (11616, 11645), False, 'import os\n'), ((11970, 12010), 'os.path.join', 'os.path.join', (['data_path', '"""train_coco.h5"""'], {}), "(data_path, 'train_coco.h5')\n", (11982, 12010), False, 'import os\n'), ((12062, 12092), 'numpy.array', 'np.array', (["f['train_mask_coco']"], {}), "(f['train_mask_coco'])\n", (12070, 12092), True, 'import numpy as np\n'), ((12617, 12648), 'tensorflow.python.client.device_lib.list_local_devices', 'device_lib.list_local_devices', ([], {}), '()\n', (12646, 12648), False, 'from tensorflow.python.client import device_lib\n'), ((8426, 8439), 'numpy.asarray', 'np.asarray', (['x'], {}), '(x)\n', (8436, 8439), True, 'import numpy as np\n'), ((11287, 11316), 'pandas.DataFrame', 'pd.DataFrame', (['history.history'], {}), '(history.history)\n', (11299, 11316), True, 'import pandas as pd\n'), ((12462, 12477), 'keras.optimizers.Nadam', 'Nadam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (12467, 12477), False, 'from keras.optimizers import Nadam\n'), 
((4894, 4938), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(size=(2, 2), dim_ordering='th')\n", (4906, 4938), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((5681, 5725), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(size=(2, 2), dim_ordering='th')\n", (5693, 5725), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((6464, 6508), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(size=(2, 2), dim_ordering='th')\n", (6476, 6508), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((7247, 7291), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)', 'dim_ordering': '"""th"""'}), "(size=(2, 2), dim_ordering='th')\n", (7259, 7291), False, 'from keras.layers import Input, merge, Convolution2D, MaxPooling2D, UpSampling2D, Cropping2D, AveragePooling2D\n'), ((11055, 11087), 'os.path.join', 'os.path.join', (['"""cache"""', 'json_name'], {}), "('cache', json_name)\n", (11067, 11087), False, 'import os\n'), ((11822, 11845), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11843, 11845), False, 'import datetime\n'), ((11924, 11947), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11945, 11947), False, 'import datetime\n'), ((10207, 10225), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (10223, 10225), True, 'import numpy as np\n'), ((10372, 10390), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (10388, 10390), True, 'import numpy as np\n'), ((10533, 10551), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (10549, 10551), True, 'import numpy as np\n'), ((11531, 11570), 'os.path.join', 'os.path.join', (['"""../src/cache"""', 'json_name'], {}), "('../src/cache', json_name)\n", (11543, 11570), False, 'import os\n')]
|
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import sys
sys.path.insert(0, ".")
import os, argparse
import numpy as np
from lib.fid import fid_score
"""
parser = argparse.ArgumentParser()
parser.add_argument("--type", default=0, type=int, help="The path to pytorch inceptionv3 weight. You can obtain this by torchvision incetion_v3 function.")
args = parser.parse_args()
"""
types = ["original", "changed", "tf"]
def calc_fid_given_type(t):
fids = []
ref_path = "/home/xujianjing/data/cifar10_image/%s_mu_sigma.npy" % t
tar_path = "/home/xujianjing/LBSGAN/logs/cifar_bs128/%s/%d_mu_sigma.npy"
ref = np.load(ref_path).tolist()
ref_mu, ref_sigma = ref['mu'], ref['sigma']
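    # load the (mu, sigma) statistics saved for every 5th epoch and compare against the reference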
for i in range(1, 200, 5):
d = tar_path % (t, i)
tar = np.load(d).tolist()
tar_mu, tar_sigma = tar['mu'], tar['sigma']
print("=> calc fid %s" % d)
fid = fid_score.calculate_frechet_distance(ref_mu, ref_sigma, tar_mu, tar_sigma)
print("Epoch %03d\t\t%.3f" % (i, fid))
fids.append(fid)
return fids
for t in types:
fids = calc_fid_given_type(t)
plt.plot(fids)
plt.legend(types)
plt.savefig("fids.png")
plt.close()
|
[
"numpy.load",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"sys.path.insert",
"matplotlib.use",
"lib.fid.fid_score.calculate_frechet_distance",
"matplotlib.pyplot.savefig"
] |
[((18, 39), 'matplotlib.use', 'matplotlib.use', (['"""agg"""'], {}), "('agg')\n", (32, 39), False, 'import matplotlib\n'), ((83, 106), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (98, 106), False, 'import sys\n'), ((1156, 1173), 'matplotlib.pyplot.legend', 'plt.legend', (['types'], {}), '(types)\n', (1166, 1173), True, 'import matplotlib.pyplot as plt\n'), ((1174, 1197), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""fids.png"""'], {}), "('fids.png')\n", (1185, 1197), True, 'import matplotlib.pyplot as plt\n'), ((1198, 1209), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1207, 1209), True, 'import matplotlib.pyplot as plt\n'), ((1141, 1155), 'matplotlib.pyplot.plot', 'plt.plot', (['fids'], {}), '(fids)\n', (1149, 1155), True, 'import matplotlib.pyplot as plt\n'), ((918, 992), 'lib.fid.fid_score.calculate_frechet_distance', 'fid_score.calculate_frechet_distance', (['ref_mu', 'ref_sigma', 'tar_mu', 'tar_sigma'], {}), '(ref_mu, ref_sigma, tar_mu, tar_sigma)\n', (954, 992), False, 'from lib.fid import fid_score\n'), ((646, 663), 'numpy.load', 'np.load', (['ref_path'], {}), '(ref_path)\n', (653, 663), True, 'import numpy as np\n'), ((796, 806), 'numpy.load', 'np.load', (['d'], {}), '(d)\n', (803, 806), True, 'import numpy as np\n')]
|
import numpy as np
# NumPy provides a simple binary file format for ndarray objects. A .npy file stores the data, shape, dtype and
# other information needed to reconstruct the ndarray, so the array is retrieved correctly even on a machine
# with a different architecture.
a = np.array([1,2,3,4,5])
np.save('outfile',a)
b = np.load('outfile.npy')
print ("Loaded .npy b: ", b)
a = np.array([1,2,3,4,5])
np.savetxt('out.txt',a)
b = np.loadtxt('out.txt')
print ("Loaded .txt b: ", b)
|
[
"numpy.load",
"numpy.save",
"numpy.savetxt",
"numpy.array",
"numpy.loadtxt"
] |
[((313, 338), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5]'], {}), '([1, 2, 3, 4, 5])\n', (321, 338), True, 'import numpy as np\n'), ((337, 358), 'numpy.save', 'np.save', (['"""outfile"""', 'a'], {}), "('outfile', a)\n", (344, 358), True, 'import numpy as np\n'), ((365, 387), 'numpy.load', 'np.load', (['"""outfile.npy"""'], {}), "('outfile.npy')\n", (372, 387), True, 'import numpy as np\n'), ((427, 452), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5]'], {}), '([1, 2, 3, 4, 5])\n', (435, 452), True, 'import numpy as np\n'), ((451, 475), 'numpy.savetxt', 'np.savetxt', (['"""out.txt"""', 'a'], {}), "('out.txt', a)\n", (461, 475), True, 'import numpy as np\n'), ((481, 502), 'numpy.loadtxt', 'np.loadtxt', (['"""out.txt"""'], {}), "('out.txt')\n", (491, 502), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import fitsio
import treecorr
import ngmix
import os
import errno
from esutil import htm
# import lfunc
# Can use for debugging
import pudb
def mag_flux_tick_function(flux):
return ["%.1f" % mag for mag in 30 - 2.5*np.log10(flux)]
def flux_mag_convert(mag, zeropt = 30.0):
    return 10**((zeropt - mag)/2.5)
def match_catalogs(truth_catalog = None, meas_catalog = None,ratag_truth = 'ra', dectag_truth = 'dec',\
ratag_meas = 'ra',dectag_meas = 'dec', match_radius = 1./3600):
matcher = htm.Matcher(depth=14,ra=truth_catalog[ratag_truth], dec = truth_catalog[dectag_truth])
id_m, id_t, dist = matcher.match(ra = meas_catalog[ratag_meas], dec = meas_catalog[dectag_meas],radius=match_radius)
truth_cut = truth_catalog[id_t]
meas_cut = meas_catalog[id_m]
return truth_cut, meas_cut, dist
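# Drop any object that has a neighbour within `radius` (degrees) so that matches are unambiguous.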
def remove_close_pairs(catalog,ratag='ra',dectag='dec',radius=5./3600):
matcher = htm.Matcher(depth=14,ra=catalog[ratag],dec=catalog[dectag])
ind1,ind2,dist = matcher.match(ra=catalog[ratag],dec=catalog[dectag],radius=radius,maxmatch=0)
nonself = dist>0
ind1 = ind1[nonself]
ind2= ind2[nonself]
all_inds = np.arange(catalog.size)
keep = np.in1d(all_inds,ind1,invert=True)
return catalog[keep]
def quality_cuts(catalog, band = 'i'):
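    """Apply basic quality cuts: flags == 0, cm_s2n_r > 10, cm_T/cm_T_err > 0.5 and cm_T/psfrec_T > 0.5."""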
bands = ['g','r','i','z','Y']
bandind = np.arange(len(bands))[np.in1d(bands,band)][0]
keep = (catalog['flags'] == 0) & (catalog['cm_s2n_r'] > 10) & (catalog['cm_T']/catalog['cm_T_err'] > .5) & (catalog['cm_T']/catalog['psfrec_T'] > 0.5)
catalog = catalog[keep]
return catalog
def get_catalogs(path = '.', tilename = 'DES0347-5540', re_id = '0', minsep = 0., stars=False, ratag = 'ra', dectag = 'dec'):
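    """Load the Balrog truth catalog (galaxies or stars) and the tile's MOF catalog,
    apply quality cuts to the measurements, match the two, and return the full truth
    catalog, the matched truth and measured objects, and their match separations."""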
if type(re_id) is not type(''):
re_id = str(re_id)
if stars is False:
cat = tilename+'_'+re_id+'_balrog_truth_cat_gals.fits'
filename = os.path.join(path, re_id, tilename, cat)
truth_catalog = fitsio.read(filename)
else:
cat = tilename+'_'+re_id+'_balrog_truth_cat_stars.fits'
filename = os.path.join(path, re_id, tilename, cat)
truth_catalog = fitsio.read(filename)
if minsep > 0.:
truth_catalog = remove_close_pairs(truth_catalog,radius=minsep,ratag=ratag,dectag=dectag)
cat = tilename + '_mof.fits'
filename = os.path.join(path, re_id, tilename, 'mof', cat)
meas_catalog = fitsio.read(filename)
meas_catalog = quality_cuts(meas_catalog)
truth_matched, meas_gal_matched, dist = match_catalogs(truth_catalog = truth_catalog, meas_catalog =meas_catalog,ratag_truth = ratag, dectag_truth = dectag)
return truth_catalog, truth_matched, meas_gal_matched, dist
def make_plots(truth = None, meas_matched = None, truth_matched = None, sep = None, filetag = '',
bandind = 1, outdir=None):
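    """Produce and save diagnostic plots (flux/magnitude residuals, completeness,
    colors, sizes and shear correlations) comparing measured MOF quantities with
    the matched truth catalog for a single band."""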
# TODO: fontsize as input
matplotlib.rcParams.update({'font.size': 16})
if outdir is None:
outdir = os.getcwd()
else:
try:
os.makedirs(outdir)
except OSError as e:
# Ignore error if dir already exists
if e.errno != errno.EEXIST:
raise e
# flux, size:
tag = 'cm_flux'
errtag = 'cm_flux_cov'
bands = ['g','r','i','z']
mag_tick_locations = np.array([0,5,10,15,20,25,30,35,40])
flux_tick_locations = flux_mag_convert(mag_tick_locations)
err = np.sqrt(meas_matched['cm_flux_cov'][:,bandind,bandind])
magerr = err / meas_matched['cm_flux'][:,bandind]
dy = [-4, 2]
a = 0.5
fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(21,7))
ax1.plot(truth_matched['cm_mag'][:,bandind],meas_matched['cm_mag'][:,bandind] - truth_matched['cm_mag'][:,bandind],'.k',alpha=a)
ax1.axhline(0,color='red',linestyle='--',alpha=0.5)
ax1.set_ylim(dy[0],dy[1])
#ax1.set_yscale('symlog')
ax1.set_xlim(16,26)
ax1b = ax1.twiny()
ax1b.set_xlim(ax1.get_xlim())
ax1b.set_xticks(mag_tick_locations)
ax1b.set_xticklabels(np.log10(flux_tick_locations))
ax1b.set_xlabel('log_10 flux')
    ax1.set_xlabel('cm_mag ('+bands[bandind]+')')
ax1.set_ylabel('magnitude difference ('+bands[bandind]+')')
ax2.plot(truth_matched['cm_mag'][:,bandind],(meas_matched[tag][:,bandind] - truth_matched[tag][:,bandind])/np.sqrt(meas_matched[errtag][:,bandind,bandind]),'.k',alpha=a)
#ax2.set_ylim(-250,250)
ax2.set_xlim(16,26)
ax2.axhline(0,color='red',linestyle='--',alpha=0.5)
ax2.set_xlabel('cm_mag ('+bands[bandind]+')')
ax2.set_ylabel('flux chi ('+bands[bandind]+')')
ax3.hist((meas_matched[tag][:,bandind] - truth_matched[tag][:,bandind])/np.sqrt(meas_matched[errtag][:,bandind,bandind]),bins=np.linspace(-25,25,250),color='k')
ax3.axvline(0,color='red',linestyle='--',alpha=a)
ax3.set_xlabel('flux chi ('+bands[bandind]+')')
outfile = os.path.join(outdir, 'cm_flux-'+filetag+bands[bandind])
fig.savefig(outfile)
plt.close(fig)
# Now make a detection efficiency plot.
size_bin_edges = np.linspace(0,10,26)
size_bin_centers = (size_bin_edges[1:] + size_bin_edges[0:-1])/2.
flux_bin_edges = np.linspace(15,28,11)
flux_bin_centers = (flux_bin_edges[1:] + flux_bin_edges[0:-1])/2.
nobj_truth_meas_mag,_ = np.histogram(truth_matched['cm_mag'],bins=flux_bin_edges)
nobj_truth_input_mag,_= np.histogram(truth['cm_mag'],bins=flux_bin_edges)
nobj_truth_meas_size,_ = np.histogram(np.log10(truth_matched['cm_T']),bins=size_bin_edges)
nobj_truth_input_size,_= np.histogram(np.log10(truth['cm_T']),bins=size_bin_edges)
fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(21,7))
ax1.plot(flux_bin_centers, nobj_truth_meas_mag*1./nobj_truth_input_mag)
ax1.set_ylabel('magnitude completeness')
ax1.set_xlabel('magnitude')
ax2.plot(size_bin_centers, nobj_truth_meas_size*1./nobj_truth_input_size)
ax2.set_ylabel('size completeness')
ax2.set_xlabel('size (T)')
ax3.plot(truth['cm_mag'][:,1],np.log10(truth['cm_T']),'.')
ax3.plot(truth_matched['cm_mag'][:,1],np.log10(truth_matched['cm_T']),'.',alpha=0.5)
ax3.set_xlabel('mag')
ax3.set_ylabel(r'log_10 cm_T')
outfile = os.path.join(outdir, 'completeness-'+filetag+bands[bandind])
fig.savefig(outfile)
plt.close(fig)
# Now do this for colors.
gr_meas = meas_matched['cm_mag'][:,0] - meas_matched['cm_mag'][:,1]
ri_meas = meas_matched['cm_mag'][:,1] - meas_matched['cm_mag'][:,2]
iz_meas = meas_matched['cm_mag'][:,2] - meas_matched['cm_mag'][:,3]
gr_truth = truth_matched['cm_mag'][:,0] - truth_matched['cm_mag'][:,1]
ri_truth = truth_matched['cm_mag'][:,1] - truth_matched['cm_mag'][:,2]
iz_truth = truth_matched['cm_mag'][:,2] - truth_matched['cm_mag'][:,3]
dy = 2.0
fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(21,7))
ax1.plot(gr_truth, gr_meas - gr_truth,'.')
ax1.axhline(0,color='red',linestyle='--')
ax1.set_ylim(-dy,dy)
ax1.set_xlim(-0.5,2.5)
ax1.set_xlabel('g-r (truth)')
ax1.set_ylabel('g-r (meas) - g-r (truth)')
ax2.plot(ri_truth,ri_meas - ri_truth,'.')
ax2.axhline(0,color='red',linestyle='--')
ax2.set_ylim(-dy,dy)
ax2.set_xlabel('r-i (truth)')
ax2.set_ylabel('r-i (meas) - r-i (truth)')
ax3.plot(iz_truth,iz_meas - iz_truth,'.')
ax3.axhline(0,color='red',linestyle='--')
ax3.set_ylim(-dy,dy)
ax3.set_xlabel('i-z (truth)')
ax3.set_ylabel('i-z (meas) - i-z (truth)')
outfile = os.path.join(outdir, 'colors_vs_colors')
fig.savefig(outfile)
plt.close(fig)
# Same for colors, but vs mag.
dy = 1.6
fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(21,7))
ax1.plot(truth_matched['cm_mag'][:,1], gr_meas - gr_truth,'.')
ax1.axhline(0,color='red',linestyle='--')
ax1.set_ylim(-dy,dy)
ax1.set_xlabel('r (truth)')
ax1.set_ylabel('g-r (meas) - g-r (truth)')
ax2.plot(truth_matched['cm_mag'][:,1],ri_meas - ri_truth,'.')
ax2.axhline(0,color='red',linestyle='--')
ax2.set_ylim(-dy,dy)
ax2.set_xlabel('r (truth)')
ax2.set_ylabel('r-i (meas) - r-i (truth)')
ax3.plot(truth_matched['cm_mag'][:,1],iz_meas - iz_truth,'.')
ax3.axhline(0,color='red',linestyle='--')
ax3.set_ylim(-dy,dy)
ax3.set_xlabel('r (truth)')
ax3.set_ylabel('i-z (meas) - i-z (truth)')
outfile = os.path.join(outdir, 'colors_vs_rmag')
fig.savefig(outfile)
plt.close(fig)
# And finally, vs T
fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(21,7))
ax1.plot(truth_matched['cm_T'], gr_meas - gr_truth,'.')
ax1.axhline(0,color='red',linestyle='--')
ax1.set_ylim(-3,3)
ax1.set_xscale('log')
ax1.set_xlabel(' cm_T (truth)')
ax1.set_ylabel('g-r (meas) - g-r (truth)')
ax2.plot(truth_matched['cm_T'],ri_meas - ri_truth,'.')
ax2.axhline(0,color='red',linestyle='--')
ax2.set_ylim(-3,3)
ax2.set_xscale('log')
ax2.set_xlabel('cm_T (truth)')
ax2.set_ylabel('r-i (meas) - r-i (truth)')
ax3.plot(truth_matched['cm_T'],iz_meas - iz_truth,'.')
ax3.axhline(0,color='red',linestyle='--')
ax3.set_ylim(-3,3)
ax3.set_xscale('log')
ax3.set_xlabel('cm_T (truth)')
ax3.set_ylabel('i-z (meas) - i-z (truth)')
outfile = os.path.join(outdir, 'colors_vs_T')
fig.savefig(outfile)
plt.close(fig)
# Variant of the below plot
fig,ax1 = plt.subplots(nrows=1,ncols=1,figsize=(7,7))
color = ['red','blue','black','cyan','orange']
delta_mag = meas_matched['cm_mag'][:,1] - truth_matched['cm_mag'][:,1]
n_delta_bins = 5
delta_mag_bds = np.arange(n_delta_bins+1)*0.5
delta_mag_bds[-1] = 100
    for i in range(n_delta_bins):
these = (np.abs(delta_mag) > delta_mag_bds[i]) & (np.abs(delta_mag) <= delta_mag_bds[i+1])
ax1.plot(meas_matched['cm_T'][these],delta_mag[these],'.',alpha=0.5,color='k')
ax1.set_xscale('log')
ax1.set_xlabel('cm_T (r)')
ax1.set_ylabel('cm_mag Measured - True (r)')
outfile = os.path.join(outdir, 't_vs_mag_dif'+bands[bandind])
fig.savefig(outfile)
plt.close(fig)
# Fun color-coded magnitude plot.
fig,ax1 = plt.subplots(nrows=1,ncols=1,figsize=(7,7))
color = ['red','blue','black','cyan','orange']
delta_mag = meas_matched['cm_mag'][:,1] - truth_matched['cm_mag'][:,1]
n_delta_bins = 5
delta_mag_bds = np.arange(n_delta_bins+1)*0.5
delta_mag_bds[-1] = 100
    for i in range(n_delta_bins):
these = (np.abs(delta_mag) > delta_mag_bds[i]) & (np.abs(delta_mag) <= delta_mag_bds[i+1])
ax1.plot(meas_matched['cm_T'][these],meas_matched['cm_mag'][:,1][these],'.',alpha=0.5,color=color[i],label='%.1f <'%delta_mag_bds[i]+'|delta mag| < '+'%.1f'%delta_mag_bds[i+1])
ax1.set_xscale('log')
ax1.set_xlabel('cm_T (r)')
ax1.set_ylabel('cm_mag (r)')
ax1.legend(loc='best')
outfile = os.path.join(outdir, 't_vs_mag_color-coded-by-delta-mag'+bands[bandind])
fig.savefig(outfile)
plt.close(fig)
# Do matching errors matter?
fig,ax1 = plt.subplots(figsize=(7,7))
color = ['red','blue','black','cyan','orange']
delta_mag = meas_matched['cm_mag'][:,1] - truth_matched['cm_mag'][:,1]
n_delta_bins = 5
delta_mag_bds = np.arange(n_delta_bins+1)*0.5
delta_mag_bds[-1] = 100
    for i in range(n_delta_bins):
these = (np.abs(delta_mag) > delta_mag_bds[i]) & (np.abs(delta_mag) <= delta_mag_bds[i+1])
ax1.plot(sep[these]*3600, meas_matched['cm_T'][these] - truth_matched['cm_T'][these],'.',alpha=0.5,color=color[i],label='%.1f <'%delta_mag_bds[i]+'|delta mag| < '+'%.1f'%delta_mag_bds[i+1])
ax1.set_xlabel('match separation (arcsec)')
ax1.set_ylabel('error in T')
ax1.set_yscale('log')
ax1.set_xscale('log')
ax1.legend(loc='best')
outfile = os.path.join(outdir, 'sep_vs_t')
fig.savefig(outfile)
plt.close(fig)
fig,ax1 = plt.subplots(figsize=(7,7))
#small = (truth_matched['cm_T'] / truth_matched['cm_T_err'] ) > 10.
small = truth_matched['cm_T'] < 10.
ax1.plot(meas_matched['cm_T_err'],meas_matched['cm_T'] - truth_matched['cm_T'],'.',markersize=5)
ax1.plot(meas_matched['cm_T_err'][small],meas_matched['cm_T'][small] - truth_matched['cm_T'][small],'.',markersize=4)
ax1.axhline(0,color='red',linestyle='--')
ax1.set_xlabel('reported error on cm_T')
ax1.set_ylabel('actual error in cm_T')
ax1.set_yscale('symlog')
ax1.set_xscale('log')
xlo,xhi = ax1.get_xlim()
ax1.plot(np.logspace(np.log10(xlo),np.log10(xhi),1000),np.logspace(np.log10(xlo),np.log10(xhi),1000),color='red',linestyle='--')
ax1.plot(np.linspace(np.log10(xlo),np.log10(xhi),1000),-np.linspace(np.log10(xlo),np.log10(xhi),1000),color='red',linestyle='--')
outfile = os.path.join(outdir, 'Terr')
fig.savefig(outfile)
plt.close(fig)
fig,ax1 = plt.subplots(figsize=(7,7))
    for i in range(n_delta_bins):
these = (np.abs(delta_mag) >= delta_mag_bds[i]) & (np.abs(delta_mag) <= delta_mag_bds[i+1])
ax1.plot(truth_matched['cm_logsb'][these,1],np.abs(meas_matched['cm_T'][these] - truth_matched['cm_T'][these])/meas_matched['cm_T_err'][these],'.',color=color[i],label='%.1f <'%delta_mag_bds[i]+'|delta mag| < '+'%.1f'%delta_mag_bds[i+1],alpha=0.5)
ax1.set_yscale('symlog')
#ax1.set_ylim(0,10)
ax1.set_xlabel('cm_logsb')
ax1.set_ylabel('| measured - input| cm_T / reported error')
ax1.legend(loc='best')
outfile = os.path.join(outdir, 'logsb_vs_cm_T')
fig.savefig(outfile)
plt.close(fig)
# Shape and Correlation plots
# fig, ax = ...
# pudb.set_trace()
if bandind == 1:
# only do once!
# min/max separation and bin size
mins, maxs = 0.1, 10
bs = 0.075
# First do the truth catalog
g1, g2 = truth_matched['cm_g'][:,0], truth_matched['cm_g'][:,1]
e1, e2 = ngmix.shape.g1g2_to_e1e2(g1, g2)
# Easiest to use treecorr by making a new temporary catalog
truth_outfile = os.path.join(outdir,'treecorr_temp_file_truth.fits')
delete_file(truth_outfile)
fits = fitsio.FITS(truth_outfile,'rw')
data = [truth_matched['ra'], truth_matched['dec'], g1, g2, e1, e2]
names = ['ra', 'dec', 'g1', 'g2', 'e1', 'e2']
fits.write(data, names=names)
fits.close()
cat = treecorr.Catalog(truth_outfile, ra_col='ra', dec_col='dec',
ra_units='degrees', dec_units='degrees',
g1_col='e1', g2_col='e2')
gg = treecorr.GGCorrelation(min_sep=mins, max_sep=maxs, bin_size=bs,
sep_units='arcmin')
gg.process(cat)
fig = plot_gg_corr(gg, plt_type='Truth')
outfile = os.path.join(outdir, 'gg_corr_truth.png')
fig.savefig(outfile)
plt.close(fig)
# Now for measured catalog
g1, g2 = meas_matched['cm_g'][:,0], meas_matched['cm_g'][:,1]
e1, e2 = ngmix.shape.g1g2_to_e1e2(g1, g2)
# Easiest to use treecorr by making a new temporary catalog
meas_outfile = os.path.join(outdir,'treecorr_temp_file_meas.fits')
delete_file(meas_outfile)
fits = fitsio.FITS(meas_outfile,'rw', clobber=True)
data = [meas_matched['ra'], meas_matched['dec'], g1, g2, e1, e2]
names = ['ra', 'dec', 'g1', 'g2', 'e1', 'e2']
fits.write(data, names=names)
fits.close()
cat = treecorr.Catalog(meas_outfile, ra_col='ra', dec_col='dec',
ra_units='degrees', dec_units='degrees',
g1_col='e1', g2_col='e2')
gg = treecorr.GGCorrelation(min_sep=mins, max_sep=maxs, bin_size=bs,
sep_units='arcmin')
gg.process(cat)
fig = plot_gg_corr(gg, plt_type='Measured')
outfile = os.path.join(outdir, 'gg_corr_meas.png')
fig.savefig(outfile)
plt.close(fig)
# Now for differences
g1t, g2t = truth_matched['cm_g'][:,0], truth_matched['cm_g'][:,1]
e1t, e2t = ngmix.shape.g1g2_to_e1e2(g1t, g2t)
g1m, g2m = meas_matched['cm_g'][:,0], meas_matched['cm_g'][:,1]
e1m, e2m = ngmix.shape.g1g2_to_e1e2(g1m, g2m)
g1d, g2d = g1m-g1t, g2m-g2t
e1d, e2d = e1m-e1t, e2m-e2t
# Easiest to use treecorr by making a new temporary catalog
diff_outfile = os.path.join(outdir,'treecorr_temp_file_diff.fits')
delete_file(diff_outfile)
fits = fitsio.FITS(diff_outfile,'rw', clobber=True)
data = [truth_matched['ra'], truth_matched['dec'], g1d, g2d, e1d, e2d]
names = ['ra', 'dec', 'g1', 'g2', 'e1', 'e2']
fits.write(data, names=names)
fits.close()
cat = treecorr.Catalog(diff_outfile, ra_col='ra', dec_col='dec',
ra_units='degrees', dec_units='degrees',
g1_col='e1', g2_col='e2')
gg = treecorr.GGCorrelation(min_sep=mins, max_sep=maxs, bin_size=bs,
sep_units='arcmin')
gg.process(cat)
fig = plot_gg_corr(gg, plt_type='Measured-True')
outfile = os.path.join(outdir, 'gg_corr_diff.png')
fig.savefig(outfile)
plt.close(fig)
def delete_file(filename):
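    """Remove filename if it exists; silently ignore a missing file."""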
try:
os.remove(filename)
except OSError as e: # this would be "except OSError, e:" before Python 2.6
if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory
raise
def plot_gg_corr(gg, plt_type=None):
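    """Plot the xi_+ / xi_- correlation functions stored in a treecorr GGCorrelation and return the figure."""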
r = np.exp(gg.meanlogr)
xip = gg.xip
xim = gg.xim
sig = np.sqrt(gg.varxi)
plt.plot(r, xip, color='blue')
# plt.plot(r, -xip, color='blue', ls=':')
plt.errorbar(r[xip>0], xip[xip>0], yerr=sig[xip>0], color='blue', lw=1, ls='')
# plt.errorbar(r[xip<0], -xip[xip<0], yerr=sig[xip<0], color='blue', lw=0.1, ls='')
lp = plt.errorbar(-r, xip, yerr=sig, color='blue')
plt.plot(r, xim, color='green')
# plt.plot(r, -xim, color='green', ls=':')
plt.errorbar(r[xim>0], xim[xim>0], yerr=sig[xim>0], color='green', lw=1, ls='')
# plt.errorbar(r[xim<0], -xim[xim<0], yerr=sig[xim<0], color='green', lw=0.1, ls='')
lm = plt.errorbar(-r, xim, yerr=sig, color='green')
# Reference line
plt.axhline(0, linestyle='--', c='k')
plt.xscale('log')
# if plt_type == 'Measured' or plt_type=='Truth':
# plt.yscale('log', nonposy='clip')
# plt.yscale('log', nonposy='clip')
plt.xlabel(r'$\theta$ (arcmin)')
plt.legend([lp, lm], [r'$\xi_+(\theta)$', r'$\xi_-(\theta)$'])
# plt.xlim( [1,100] )
plt.ylabel(r'$\xi_{+,-}$')
plt.title(plt_type, fontsize=16)
plt.gcf().set_size_inches(7,7)
return plt.gcf()
def make_star_plots(truth = None, meas_matched = None, truth_matched = None, filetag = ''):
tag = 'cm_flux'
errtag = 'cm_flux_cov'
bandind = 2
bands = ['g','r','i','z']
pass
def main(argv):
run_name = 'shear_test_ps'
# run_name = 'sof_stars'
# run_name = 'grid_with_noise'
# run_name = 'grid_test_ngmix'
# run_name = 'grid_test_shear_sof'
path = os.path.join('/home/spencer/research/balrog/outputs/' + run_name)
tilename = 'DES0347-5540'
re_id = '0'
min_sep = 0./3600 # minimum input object separation, degrees
#truth_gal_catalog = fitsio.read(path+tilename+'_0/'+tilename+'_'+re_id+'_balrog_truth_cat_gals.fits')
#truth_star_catalog = fitsio.read(path+tilename+'_0/'+tilename+'_'+re_id+'_balrog_truth_cat_stars.fits')
#meas_catalog = fitsio.read(path+tilename+'_0/'+tilename+'_mof.fits')
#truth_gal_matched, meas_gal_matched = match_catalogs(truth_catalog = truth_gal_catalog, meas_catalog =meas_catalog)
truth_gal_catalog, truth_gal_matched, meas_gal_matched, sep = get_catalogs(path = path, tilename = tilename, re_id = re_id, minsep = min_sep,\
ratag = 'ra', dectag = 'dec',stars=False)
    for i in range(4):
make_plots(truth=truth_gal_catalog, meas_matched = meas_gal_matched, truth_matched=truth_gal_matched,
sep = sep,filetag='',bandind=i, outdir=run_name)
#truth_star_catalog, truth_star_matched, meas_star_matched = get_catalogs(path = path, tilename = tilename, re_id = re_id, minsep = min_sep,\
# ratag = 'RA_new', dectag = 'DEC_new',stars=True)
#make_star_plots(truth=truth_star_catalog,meas_matched = meas_star_matched, truth_matched = truth_star_matched)
if __name__ == "__main__":
    import pdb, traceback, sys
try:
main(sys.argv)
except:
thingtype, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
|
[
"matplotlib.pyplot.title",
"treecorr.GGCorrelation",
"os.remove",
"pdb.post_mortem",
"ngmix.shape.g1g2_to_e1e2",
"numpy.abs",
"treecorr.Catalog",
"fitsio.read",
"numpy.histogram",
"numpy.arange",
"numpy.exp",
"sys.exc_info",
"os.path.join",
"traceback.print_exc",
"matplotlib.pyplot.close",
"matplotlib.rcParams.update",
"numpy.linspace",
"numpy.log10",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.errorbar",
"matplotlib.pyplot.axhline",
"numpy.in1d",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.xscale",
"fitsio.FITS",
"matplotlib.pyplot.plot",
"os.makedirs",
"os.getcwd",
"numpy.array",
"esutil.htm.Matcher",
"matplotlib.pyplot.xlabel",
"numpy.sqrt"
] |
[((610, 700), 'esutil.htm.Matcher', 'htm.Matcher', ([], {'depth': '(14)', 'ra': 'truth_catalog[ratag_truth]', 'dec': 'truth_catalog[dectag_truth]'}), '(depth=14, ra=truth_catalog[ratag_truth], dec=truth_catalog[\n dectag_truth])\n', (621, 700), False, 'from esutil import htm\n'), ((1012, 1073), 'esutil.htm.Matcher', 'htm.Matcher', ([], {'depth': '(14)', 'ra': 'catalog[ratag]', 'dec': 'catalog[dectag]'}), '(depth=14, ra=catalog[ratag], dec=catalog[dectag])\n', (1023, 1073), False, 'from esutil import htm\n'), ((1256, 1279), 'numpy.arange', 'np.arange', (['catalog.size'], {}), '(catalog.size)\n', (1265, 1279), True, 'import numpy as np\n'), ((1291, 1327), 'numpy.in1d', 'np.in1d', (['all_inds', 'ind1'], {'invert': '(True)'}), '(all_inds, ind1, invert=True)\n', (1298, 1327), True, 'import numpy as np\n'), ((2417, 2464), 'os.path.join', 'os.path.join', (['path', 're_id', 'tilename', '"""mof"""', 'cat'], {}), "(path, re_id, tilename, 'mof', cat)\n", (2429, 2464), False, 'import os\n'), ((2484, 2505), 'fitsio.read', 'fitsio.read', (['filename'], {}), '(filename)\n', (2495, 2505), False, 'import fitsio\n'), ((2954, 2999), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 16}"], {}), "({'font.size': 16})\n", (2980, 2999), False, 'import matplotlib\n'), ((3373, 3417), 'numpy.array', 'np.array', (['[0, 5, 10, 15, 20, 25, 30, 35, 40]'], {}), '([0, 5, 10, 15, 20, 25, 30, 35, 40])\n', (3381, 3417), True, 'import numpy as np\n'), ((3484, 3541), 'numpy.sqrt', 'np.sqrt', (["meas_matched['cm_flux_cov'][:, bandind, bandind]"], {}), "(meas_matched['cm_flux_cov'][:, bandind, bandind])\n", (3491, 3541), True, 'import numpy as np\n'), ((3648, 3695), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'figsize': '(21, 7)'}), '(nrows=1, ncols=3, figsize=(21, 7))\n', (3660, 3695), True, 'import matplotlib.pyplot as plt\n'), ((4972, 5031), 'os.path.join', 'os.path.join', (['outdir', "('cm_flux-' + filetag + bands[bandind])"], {}), "(outdir, 'cm_flux-' + filetag + bands[bandind])\n", (4984, 5031), False, 'import os\n'), ((5057, 5071), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (5066, 5071), True, 'import matplotlib.pyplot as plt\n'), ((5138, 5160), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(26)'], {}), '(0, 10, 26)\n', (5149, 5160), True, 'import numpy as np\n'), ((5251, 5274), 'numpy.linspace', 'np.linspace', (['(15)', '(28)', '(11)'], {}), '(15, 28, 11)\n', (5262, 5274), True, 'import numpy as np\n'), ((5372, 5430), 'numpy.histogram', 'np.histogram', (["truth_matched['cm_mag']"], {'bins': 'flux_bin_edges'}), "(truth_matched['cm_mag'], bins=flux_bin_edges)\n", (5384, 5430), True, 'import numpy as np\n'), ((5458, 5508), 'numpy.histogram', 'np.histogram', (["truth['cm_mag']"], {'bins': 'flux_bin_edges'}), "(truth['cm_mag'], bins=flux_bin_edges)\n", (5470, 5508), True, 'import numpy as np\n'), ((5716, 5763), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'figsize': '(21, 7)'}), '(nrows=1, ncols=3, figsize=(21, 7))\n', (5728, 5763), True, 'import matplotlib.pyplot as plt\n'), ((6291, 6355), 'os.path.join', 'os.path.join', (['outdir', "('completeness-' + filetag + bands[bandind])"], {}), "(outdir, 'completeness-' + filetag + bands[bandind])\n", (6303, 6355), False, 'import os\n'), ((6381, 6395), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (6390, 6395), True, 'import matplotlib.pyplot as plt\n'), ((6908, 6955), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': 
'(1)', 'ncols': '(3)', 'figsize': '(21, 7)'}), '(nrows=1, ncols=3, figsize=(21, 7))\n', (6920, 6955), True, 'import matplotlib.pyplot as plt\n'), ((7592, 7632), 'os.path.join', 'os.path.join', (['outdir', '"""colors_vs_colors"""'], {}), "(outdir, 'colors_vs_colors')\n", (7604, 7632), False, 'import os\n'), ((7662, 7676), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (7671, 7676), True, 'import matplotlib.pyplot as plt\n'), ((7751, 7798), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'figsize': '(21, 7)'}), '(nrows=1, ncols=3, figsize=(21, 7))\n', (7763, 7798), True, 'import matplotlib.pyplot as plt\n'), ((8462, 8500), 'os.path.join', 'os.path.join', (['outdir', '"""colors_vs_rmag"""'], {}), "(outdir, 'colors_vs_rmag')\n", (8474, 8500), False, 'import os\n'), ((8530, 8544), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (8539, 8544), True, 'import matplotlib.pyplot as plt\n'), ((8595, 8642), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'figsize': '(21, 7)'}), '(nrows=1, ncols=3, figsize=(21, 7))\n', (8607, 8642), True, 'import matplotlib.pyplot as plt\n'), ((9367, 9402), 'os.path.join', 'os.path.join', (['outdir', '"""colors_vs_T"""'], {}), "(outdir, 'colors_vs_T')\n", (9379, 9402), False, 'import os\n'), ((9432, 9446), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (9441, 9446), True, 'import matplotlib.pyplot as plt\n'), ((9494, 9540), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'figsize': '(7, 7)'}), '(nrows=1, ncols=1, figsize=(7, 7))\n', (9506, 9540), True, 'import matplotlib.pyplot as plt\n'), ((10106, 10159), 'os.path.join', 'os.path.join', (['outdir', "('t_vs_mag_dif' + bands[bandind])"], {}), "(outdir, 't_vs_mag_dif' + bands[bandind])\n", (10118, 10159), False, 'import os\n'), ((10187, 10201), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (10196, 10201), True, 'import matplotlib.pyplot as plt\n'), ((10255, 10301), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'figsize': '(7, 7)'}), '(nrows=1, ncols=1, figsize=(7, 7))\n', (10267, 10301), True, 'import matplotlib.pyplot as plt\n'), ((10976, 11050), 'os.path.join', 'os.path.join', (['outdir', "('t_vs_mag_color-coded-by-delta-mag' + bands[bandind])"], {}), "(outdir, 't_vs_mag_color-coded-by-delta-mag' + bands[bandind])\n", (10988, 11050), False, 'import os\n'), ((11078, 11092), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (11087, 11092), True, 'import matplotlib.pyplot as plt\n'), ((11141, 11169), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(7, 7)'}), '(figsize=(7, 7))\n', (11153, 11169), True, 'import matplotlib.pyplot as plt\n'), ((11902, 11934), 'os.path.join', 'os.path.join', (['outdir', '"""sep_vs_t"""'], {}), "(outdir, 'sep_vs_t')\n", (11914, 11934), False, 'import os\n'), ((11964, 11978), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (11973, 11978), True, 'import matplotlib.pyplot as plt\n'), ((11994, 12022), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(7, 7)'}), '(figsize=(7, 7))\n', (12006, 12022), True, 'import matplotlib.pyplot as plt\n'), ((12859, 12887), 'os.path.join', 'os.path.join', (['outdir', '"""Terr"""'], {}), "(outdir, 'Terr')\n", (12871, 12887), False, 'import os\n'), ((12917, 12931), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (12926, 12931), True, 'import matplotlib.pyplot as 
plt\n'), ((12947, 12975), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(7, 7)'}), '(figsize=(7, 7))\n', (12959, 12975), True, 'import matplotlib.pyplot as plt\n'), ((13557, 13594), 'os.path.join', 'os.path.join', (['outdir', '"""logsb_vs_cm_T"""'], {}), "(outdir, 'logsb_vs_cm_T')\n", (13569, 13594), False, 'import os\n'), ((13624, 13638), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (13633, 13638), True, 'import matplotlib.pyplot as plt\n'), ((17639, 17658), 'numpy.exp', 'np.exp', (['gg.meanlogr'], {}), '(gg.meanlogr)\n', (17645, 17658), True, 'import numpy as np\n'), ((17703, 17720), 'numpy.sqrt', 'np.sqrt', (['gg.varxi'], {}), '(gg.varxi)\n', (17710, 17720), True, 'import numpy as np\n'), ((17726, 17756), 'matplotlib.pyplot.plot', 'plt.plot', (['r', 'xip'], {'color': '"""blue"""'}), "(r, xip, color='blue')\n", (17734, 17756), True, 'import matplotlib.pyplot as plt\n'), ((17807, 17896), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['r[xip > 0]', 'xip[xip > 0]'], {'yerr': 'sig[xip > 0]', 'color': '"""blue"""', 'lw': '(1)', 'ls': '""""""'}), "(r[xip > 0], xip[xip > 0], yerr=sig[xip > 0], color='blue', lw=\n 1, ls='')\n", (17819, 17896), True, 'import matplotlib.pyplot as plt\n'), ((17983, 18028), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['(-r)', 'xip'], {'yerr': 'sig', 'color': '"""blue"""'}), "(-r, xip, yerr=sig, color='blue')\n", (17995, 18028), True, 'import matplotlib.pyplot as plt\n'), ((18034, 18065), 'matplotlib.pyplot.plot', 'plt.plot', (['r', 'xim'], {'color': '"""green"""'}), "(r, xim, color='green')\n", (18042, 18065), True, 'import matplotlib.pyplot as plt\n'), ((18117, 18207), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['r[xim > 0]', 'xim[xim > 0]'], {'yerr': 'sig[xim > 0]', 'color': '"""green"""', 'lw': '(1)', 'ls': '""""""'}), "(r[xim > 0], xim[xim > 0], yerr=sig[xim > 0], color='green', lw\n =1, ls='')\n", (18129, 18207), True, 'import matplotlib.pyplot as plt\n'), ((18295, 18341), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['(-r)', 'xim'], {'yerr': 'sig', 'color': '"""green"""'}), "(-r, xim, yerr=sig, color='green')\n", (18307, 18341), True, 'import matplotlib.pyplot as plt\n'), ((18368, 18405), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(0)'], {'linestyle': '"""--"""', 'c': '"""k"""'}), "(0, linestyle='--', c='k')\n", (18379, 18405), True, 'import matplotlib.pyplot as plt\n'), ((18411, 18428), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (18421, 18428), True, 'import matplotlib.pyplot as plt\n'), ((18571, 18603), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$ (arcmin)"""'], {}), "('$\\\\theta$ (arcmin)')\n", (18581, 18603), True, 'import matplotlib.pyplot as plt\n'), ((18609, 18673), 'matplotlib.pyplot.legend', 'plt.legend', (['[lp, lm]', "['$\\\\xi_+(\\\\theta)$', '$\\\\xi_-(\\\\theta)$']"], {}), "([lp, lm], ['$\\\\xi_+(\\\\theta)$', '$\\\\xi_-(\\\\theta)$'])\n", (18619, 18673), True, 'import matplotlib.pyplot as plt\n'), ((18702, 18728), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\xi_{+,-}$"""'], {}), "('$\\\\xi_{+,-}$')\n", (18712, 18728), True, 'import matplotlib.pyplot as plt\n'), ((18733, 18765), 'matplotlib.pyplot.title', 'plt.title', (['plt_type'], {'fontsize': '(16)'}), '(plt_type, fontsize=16)\n', (18742, 18765), True, 'import matplotlib.pyplot as plt\n'), ((18814, 18823), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (18821, 18823), True, 'import matplotlib.pyplot as plt\n'), ((19217, 19282), 'os.path.join', 'os.path.join', 
(["('/home/spencer/research/balrog/outputs/' + run_name)"], {}), "('/home/spencer/research/balrog/outputs/' + run_name)\n", (19229, 19282), False, 'import os\n'), ((1983, 2023), 'os.path.join', 'os.path.join', (['path', 're_id', 'tilename', 'cat'], {}), '(path, re_id, tilename, cat)\n', (1995, 2023), False, 'import os\n'), ((2048, 2069), 'fitsio.read', 'fitsio.read', (['filename'], {}), '(filename)\n', (2059, 2069), False, 'import fitsio\n'), ((2163, 2203), 'os.path.join', 'os.path.join', (['path', 're_id', 'tilename', 'cat'], {}), '(path, re_id, tilename, cat)\n', (2175, 2203), False, 'import os\n'), ((2228, 2249), 'fitsio.read', 'fitsio.read', (['filename'], {}), '(filename)\n', (2239, 2249), False, 'import fitsio\n'), ((3041, 3052), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3050, 3052), False, 'import os\n'), ((4088, 4117), 'numpy.log10', 'np.log10', (['flux_tick_locations'], {}), '(flux_tick_locations)\n', (4096, 4117), True, 'import numpy as np\n'), ((5551, 5582), 'numpy.log10', 'np.log10', (["truth_matched['cm_T']"], {}), "(truth_matched['cm_T'])\n", (5559, 5582), True, 'import numpy as np\n'), ((5646, 5669), 'numpy.log10', 'np.log10', (["truth['cm_T']"], {}), "(truth['cm_T'])\n", (5654, 5669), True, 'import numpy as np\n'), ((6097, 6120), 'numpy.log10', 'np.log10', (["truth['cm_T']"], {}), "(truth['cm_T'])\n", (6105, 6120), True, 'import numpy as np\n'), ((6168, 6199), 'numpy.log10', 'np.log10', (["truth_matched['cm_T']"], {}), "(truth_matched['cm_T'])\n", (6176, 6199), True, 'import numpy as np\n'), ((9706, 9733), 'numpy.arange', 'np.arange', (['(n_delta_bins + 1)'], {}), '(n_delta_bins + 1)\n', (9715, 9733), True, 'import numpy as np\n'), ((10467, 10494), 'numpy.arange', 'np.arange', (['(n_delta_bins + 1)'], {}), '(n_delta_bins + 1)\n', (10476, 10494), True, 'import numpy as np\n'), ((11337, 11364), 'numpy.arange', 'np.arange', (['(n_delta_bins + 1)'], {}), '(n_delta_bins + 1)\n', (11346, 11364), True, 'import numpy as np\n'), ((13981, 14013), 'ngmix.shape.g1g2_to_e1e2', 'ngmix.shape.g1g2_to_e1e2', (['g1', 'g2'], {}), '(g1, g2)\n', (14005, 14013), False, 'import ngmix\n'), ((14107, 14160), 'os.path.join', 'os.path.join', (['outdir', '"""treecorr_temp_file_truth.fits"""'], {}), "(outdir, 'treecorr_temp_file_truth.fits')\n", (14119, 14160), False, 'import os\n'), ((14210, 14242), 'fitsio.FITS', 'fitsio.FITS', (['truth_outfile', '"""rw"""'], {}), "(truth_outfile, 'rw')\n", (14221, 14242), False, 'import fitsio\n'), ((14445, 14576), 'treecorr.Catalog', 'treecorr.Catalog', (['truth_outfile'], {'ra_col': '"""ra"""', 'dec_col': '"""dec"""', 'ra_units': '"""degrees"""', 'dec_units': '"""degrees"""', 'g1_col': '"""e1"""', 'g2_col': '"""e2"""'}), "(truth_outfile, ra_col='ra', dec_col='dec', ra_units=\n 'degrees', dec_units='degrees', g1_col='e1', g2_col='e2')\n", (14461, 14576), False, 'import treecorr\n'), ((14641, 14729), 'treecorr.GGCorrelation', 'treecorr.GGCorrelation', ([], {'min_sep': 'mins', 'max_sep': 'maxs', 'bin_size': 'bs', 'sep_units': '"""arcmin"""'}), "(min_sep=mins, max_sep=maxs, bin_size=bs, sep_units=\n 'arcmin')\n", (14663, 14729), False, 'import treecorr\n'), ((14852, 14893), 'os.path.join', 'os.path.join', (['outdir', '"""gg_corr_truth.png"""'], {}), "(outdir, 'gg_corr_truth.png')\n", (14864, 14893), False, 'import os\n'), ((14931, 14945), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (14940, 14945), True, 'import matplotlib.pyplot as plt\n'), ((15069, 15101), 'ngmix.shape.g1g2_to_e1e2', 'ngmix.shape.g1g2_to_e1e2', (['g1', 'g2'], {}), '(g1, 
g2)\n', (15093, 15101), False, 'import ngmix\n'), ((15194, 15246), 'os.path.join', 'os.path.join', (['outdir', '"""treecorr_temp_file_meas.fits"""'], {}), "(outdir, 'treecorr_temp_file_meas.fits')\n", (15206, 15246), False, 'import os\n'), ((15295, 15340), 'fitsio.FITS', 'fitsio.FITS', (['meas_outfile', '"""rw"""'], {'clobber': '(True)'}), "(meas_outfile, 'rw', clobber=True)\n", (15306, 15340), False, 'import fitsio\n'), ((15541, 15671), 'treecorr.Catalog', 'treecorr.Catalog', (['meas_outfile'], {'ra_col': '"""ra"""', 'dec_col': '"""dec"""', 'ra_units': '"""degrees"""', 'dec_units': '"""degrees"""', 'g1_col': '"""e1"""', 'g2_col': '"""e2"""'}), "(meas_outfile, ra_col='ra', dec_col='dec', ra_units=\n 'degrees', dec_units='degrees', g1_col='e1', g2_col='e2')\n", (15557, 15671), False, 'import treecorr\n'), ((15736, 15824), 'treecorr.GGCorrelation', 'treecorr.GGCorrelation', ([], {'min_sep': 'mins', 'max_sep': 'maxs', 'bin_size': 'bs', 'sep_units': '"""arcmin"""'}), "(min_sep=mins, max_sep=maxs, bin_size=bs, sep_units=\n 'arcmin')\n", (15758, 15824), False, 'import treecorr\n'), ((15950, 15990), 'os.path.join', 'os.path.join', (['outdir', '"""gg_corr_meas.png"""'], {}), "(outdir, 'gg_corr_meas.png')\n", (15962, 15990), False, 'import os\n'), ((16028, 16042), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (16037, 16042), True, 'import matplotlib.pyplot as plt\n'), ((16167, 16201), 'ngmix.shape.g1g2_to_e1e2', 'ngmix.shape.g1g2_to_e1e2', (['g1t', 'g2t'], {}), '(g1t, g2t)\n', (16191, 16201), False, 'import ngmix\n'), ((16293, 16327), 'ngmix.shape.g1g2_to_e1e2', 'ngmix.shape.g1g2_to_e1e2', (['g1m', 'g2m'], {}), '(g1m, g2m)\n', (16317, 16327), False, 'import ngmix\n'), ((16492, 16544), 'os.path.join', 'os.path.join', (['outdir', '"""treecorr_temp_file_diff.fits"""'], {}), "(outdir, 'treecorr_temp_file_diff.fits')\n", (16504, 16544), False, 'import os\n'), ((16593, 16638), 'fitsio.FITS', 'fitsio.FITS', (['diff_outfile', '"""rw"""'], {'clobber': '(True)'}), "(diff_outfile, 'rw', clobber=True)\n", (16604, 16638), False, 'import fitsio\n'), ((16845, 16975), 'treecorr.Catalog', 'treecorr.Catalog', (['diff_outfile'], {'ra_col': '"""ra"""', 'dec_col': '"""dec"""', 'ra_units': '"""degrees"""', 'dec_units': '"""degrees"""', 'g1_col': '"""e1"""', 'g2_col': '"""e2"""'}), "(diff_outfile, ra_col='ra', dec_col='dec', ra_units=\n 'degrees', dec_units='degrees', g1_col='e1', g2_col='e2')\n", (16861, 16975), False, 'import treecorr\n'), ((17040, 17128), 'treecorr.GGCorrelation', 'treecorr.GGCorrelation', ([], {'min_sep': 'mins', 'max_sep': 'maxs', 'bin_size': 'bs', 'sep_units': '"""arcmin"""'}), "(min_sep=mins, max_sep=maxs, bin_size=bs, sep_units=\n 'arcmin')\n", (17062, 17128), False, 'import treecorr\n'), ((17259, 17299), 'os.path.join', 'os.path.join', (['outdir', '"""gg_corr_diff.png"""'], {}), "(outdir, 'gg_corr_diff.png')\n", (17271, 17299), False, 'import os\n'), ((17337, 17351), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (17346, 17351), True, 'import matplotlib.pyplot as plt\n'), ((17397, 17416), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (17406, 17416), False, 'import os\n'), ((1461, 1481), 'numpy.in1d', 'np.in1d', (['bands', 'band'], {}), '(bands, band)\n', (1468, 1481), True, 'import numpy as np\n'), ((3088, 3107), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (3099, 3107), False, 'import os\n'), ((4413, 4463), 'numpy.sqrt', 'np.sqrt', (['meas_matched[errtag][:, bandind, bandind]'], {}), '(meas_matched[errtag][:, bandind, 
bandind])\n', (4420, 4463), True, 'import numpy as np\n'), ((4762, 4812), 'numpy.sqrt', 'np.sqrt', (['meas_matched[errtag][:, bandind, bandind]'], {}), '(meas_matched[errtag][:, bandind, bandind])\n', (4769, 4812), True, 'import numpy as np\n'), ((4816, 4841), 'numpy.linspace', 'np.linspace', (['(-25)', '(25)', '(250)'], {}), '(-25, 25, 250)\n', (4827, 4841), True, 'import numpy as np\n'), ((12602, 12615), 'numpy.log10', 'np.log10', (['xlo'], {}), '(xlo)\n', (12610, 12615), True, 'import numpy as np\n'), ((12616, 12629), 'numpy.log10', 'np.log10', (['xhi'], {}), '(xhi)\n', (12624, 12629), True, 'import numpy as np\n'), ((12648, 12661), 'numpy.log10', 'np.log10', (['xlo'], {}), '(xlo)\n', (12656, 12661), True, 'import numpy as np\n'), ((12662, 12675), 'numpy.log10', 'np.log10', (['xhi'], {}), '(xhi)\n', (12670, 12675), True, 'import numpy as np\n'), ((12735, 12748), 'numpy.log10', 'np.log10', (['xlo'], {}), '(xlo)\n', (12743, 12748), True, 'import numpy as np\n'), ((12749, 12762), 'numpy.log10', 'np.log10', (['xhi'], {}), '(xhi)\n', (12757, 12762), True, 'import numpy as np\n'), ((18771, 18780), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (18778, 18780), True, 'import matplotlib.pyplot as plt\n'), ((20803, 20817), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (20815, 20817), False, 'import pdb, traceback, sys\n'), ((20826, 20847), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (20845, 20847), False, 'import pdb, traceback, sys\n'), ((20856, 20875), 'pdb.post_mortem', 'pdb.post_mortem', (['tb'], {}), '(tb)\n', (20871, 20875), False, 'import pdb, traceback, sys\n'), ((9816, 9833), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (9822, 9833), True, 'import numpy as np\n'), ((9857, 9874), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (9863, 9874), True, 'import numpy as np\n'), ((10577, 10594), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (10583, 10594), True, 'import numpy as np\n'), ((10618, 10635), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (10624, 10635), True, 'import numpy as np\n'), ((11447, 11464), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (11453, 11464), True, 'import numpy as np\n'), ((11488, 11505), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (11494, 11505), True, 'import numpy as np\n'), ((12782, 12795), 'numpy.log10', 'np.log10', (['xlo'], {}), '(xlo)\n', (12790, 12795), True, 'import numpy as np\n'), ((12796, 12809), 'numpy.log10', 'np.log10', (['xhi'], {}), '(xhi)\n', (12804, 12809), True, 'import numpy as np\n'), ((13027, 13044), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (13033, 13044), True, 'import numpy as np\n'), ((13069, 13086), 'numpy.abs', 'np.abs', (['delta_mag'], {}), '(delta_mag)\n', (13075, 13086), True, 'import numpy as np\n'), ((13162, 13228), 'numpy.abs', 'np.abs', (["(meas_matched['cm_T'][these] - truth_matched['cm_T'][these])"], {}), "(meas_matched['cm_T'][these] - truth_matched['cm_T'][these])\n", (13168, 13228), True, 'import numpy as np\n'), ((313, 327), 'numpy.log10', 'np.log10', (['flux'], {}), '(flux)\n', (321, 327), True, 'import numpy as np\n')]
|
# reference implementation of HIGGS training
import argparse
import os
import pandas as pd  # needed by load_HIGGS (pd.read_csv)
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
from torch.utils.data import DataLoader, random_split
from tqdm import tqdm
# for reproducibility inits
import random
import numpy as np
# give this to each dataloader
def dataloader_seed_worker(worker_id):
worker_seed = torch.initial_seed() % 2 ** 32
np.random.seed(worker_seed)
random.seed(worker_seed)
# higgs dataset
def load_HIGGS(root, transform, n_row_limit=400_000):
"""Download the dataset manually from
https://archive.ics.uci.edu/ml/datasets/HIGGS and copy it into
`root`.
"""
n_train_limit = int(0.9 * n_row_limit)
data = pd.read_csv(
os.path.join(root, "HIGGS.csv"), header=None, dtype="float32", nrows=n_row_limit
)
data_train = data[:n_train_limit].reset_index(drop=True).values
data_test = data[n_train_limit:].reset_index(drop=True).values
class HIGGSTrainDataSet:
def __getitem__(self, idx):
return data_train[idx][1:], int(data_train[idx][0])
def __len__(self):
return len(data_train)
class HIGGSTestDataSet:
def __getitem__(self, idx):
return data_test[idx][1:], int(data_test[idx][0])
def __len__(self):
return len(data_test)
return HIGGSTrainDataSet(), HIGGSTestDataSet()
# networks
class MLPNet(nn.Module):
"""
    Four-layer fully connected network: three tanh hidden layers of width 300 and a single sigmoid output unit.
"""
def __init__(self):
super(MLPNet, self).__init__()
self.fc1 = nn.Linear(28, 300)
self.fc2 = nn.Linear(300, 300)
self.fc3 = nn.Linear(300, 300)
self.fc4 = nn.Linear(300, 1)
def forward(self, x):
x = torch.tanh(self.fc1(x))
x = torch.tanh(self.fc2(x))
x = torch.tanh(self.fc3(x))
x = torch.sigmoid(self.fc4(x))
return x
def train(model, optimizer, data_loader):
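    """Run one training epoch; uses the module-level `criterion` and `use_cuda` set in the __main__ block."""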
model.train()
for batch_idx, (x, target) in enumerate(tqdm(data_loader)):
optimizer.zero_grad()
if use_cuda:
x, target = x.cuda(), target.cuda()
out = model(x)
loss = criterion(out.squeeze(), target.to(torch.float))
loss.backward()
optimizer.step()
return model
def test(model, data_loader):
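    """Evaluate the model and return (summed loss, number correct, total sample count)."""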
correct_cnt = 0
total_cnt = 0
summed_loss = 0
model.eval()
for batch_idx, (x, target) in enumerate(tqdm(data_loader)):
if use_cuda:
x, target = x.cuda(), target.cuda()
out = model(x)
loss = criterion(out.squeeze(), target.to(torch.float))
pred_label = (out.squeeze().data >= 0.5).to(torch.int)
correct_cnt += (pred_label == target.data).sum()
summed_loss += loss.detach().cpu().numpy()
total_cnt += x.shape[0]
return summed_loss, correct_cnt, total_cnt
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Train a Neural Network on HIGGS and check accuracy.')
parser.add_argument('--batch_size', default=128, type=int, help="Batch size for training")
parser.add_argument('--epochs', default=100, type=int, help='Number of epochs to train.')
parser.add_argument('--seed', default=7, type=int, help='Seed for reproducibility.')
args = parser.parse_args()
    # reproducibility: seed Python, NumPy and PyTorch (CPU and all GPUs)
    seed = args.seed
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # if you are using multi-GPU.
    np.random.seed(seed)  # Numpy module.
    random.seed(seed)  # Python random module.
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
batch_size = args.batch_size
use_cuda = torch.cuda.is_available()
train_set, test_set = load_HIGGS(
root="experiments/HIGGS/higgs_data/", transform=transforms.ToTensor()
)
val_size = int(0.05 * len(train_set))
train_size = len(train_set) - val_size
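    # hold out 5% of the training rows for validation, using a seeded split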
train_set, val_set = random_split(
train_set, [train_size, val_size], generator=torch.Generator().manual_seed(seed)
)
# SGD (row_limit: 400_000, n_epochs=20)
# lr=0.05: Final validation loss: 0.004345, acc: 0.715
# lr=0.1: Final validation loss: 0.004203, acc: 0.725
# lr=0.5: Final validation loss: 0.004201, acc: 0.726
# lr=1.0: Final validation loss: 0.004280, acc: 0.720
# ADAM (row_limit: 400_000, n_epochs=20)
# lr=1e-4: Final validation loss: 0.004180, acc: 0.727
# lr=5e-4: Final validation loss: 0.004025, acc: 0.746
# lr=1e-3: Final validation loss: 0.004112, acc: 0.740
n_epochs = args.epochs
# for lr in [1e-4, 5e-4, 1e-3]:
for lr in [0.5]:
print()
print("lr", lr)
num_workers = 1
train_loader = DataLoader(
dataset=train_set,
batch_size=batch_size,
shuffle=True,
num_workers=num_workers,
pin_memory=True,
worker_init_fn=dataloader_seed_worker,
)
val_loader = DataLoader(
dataset=val_set,
batch_size=batch_size,
shuffle=False,
num_workers=num_workers,
pin_memory=True,
worker_init_fn=dataloader_seed_worker,
)
test_loader = DataLoader(
dataset=test_set,
batch_size=batch_size,
shuffle=False,
num_workers=num_workers,
pin_memory=True,
worker_init_fn=dataloader_seed_worker,
)
print(f"Total training batches: {len(train_loader)}")
print(f"Total validation batches: {len(val_loader)}")
print(f"Total testing batches: {len(test_loader)}")
torch.manual_seed(seed)
model = MLPNet()
if use_cuda:
model = model.cuda()
# optimizer = optim.Adam(model.parameters(), lr=lr)
optimizer = optim.SGD(model.parameters(), lr=lr)
criterion = nn.BCELoss()
summed_loss, correct_cnt, total_cnt = test(model, val_loader)
print(
f"Initial loss: {summed_loss / total_cnt:.6f}, acc: {correct_cnt / total_cnt:.3f}"
)
history = np.zeros((n_epochs + 1, 2))
history[0, 0] = summed_loss / total_cnt
history[0, 1] = correct_cnt / total_cnt
for epoch in range(n_epochs):
model = train(model, optimizer, train_loader)
summed_loss, correct_cnt, total_cnt = test(model, val_loader)
history[epoch + 1, 0] = summed_loss / total_cnt
history[epoch + 1, 1] = correct_cnt / total_cnt
print(correct_cnt/total_cnt)
print(
f"Final loss: {summed_loss / total_cnt:.6f}, acc: {correct_cnt / total_cnt:.3f}"
)
# summed_loss, correct_cnt, total_cnt = test(model, test_loader)
# print()
# print(
# f"\nTest loss: {summed_loss / total_cnt:.6f}, test acc: {correct_cnt / total_cnt:.3f}"
# )
# torch.save(model.state_dict(), f"{model.__class__.__name__}.pt")
# np.save("history_higgs.npy", history)
|
[
"tqdm.tqdm",
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"torch.nn.BCELoss",
"torch.manual_seed",
"torch.cuda.manual_seed",
"numpy.zeros",
"torch.cuda.manual_seed_all",
"random.seed",
"torch.cuda.is_available",
"torch.initial_seed",
"torch.nn.Linear",
"os.path.join",
"torch.Generator",
"torchvision.transforms.ToTensor"
] |
[((464, 491), 'numpy.random.seed', 'np.random.seed', (['worker_seed'], {}), '(worker_seed)\n', (478, 491), True, 'import numpy as np\n'), ((496, 520), 'random.seed', 'random.seed', (['worker_seed'], {}), '(worker_seed)\n', (507, 520), False, 'import random\n'), ((2952, 3047), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train a Neural Network on HIGGS and check accuracy."""'}), "(description=\n 'Train a Neural Network on HIGGS and check accuracy.')\n", (2975, 3047), False, 'import argparse\n'), ((3424, 3447), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (3441, 3447), False, 'import torch\n'), ((3452, 3480), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (3474, 3480), False, 'import torch\n'), ((3485, 3517), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed'], {}), '(seed)\n', (3511, 3517), False, 'import torch\n'), ((3553, 3573), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (3567, 3573), True, 'import numpy as np\n'), ((3595, 3612), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (3606, 3612), False, 'import random\n'), ((3642, 3665), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (3659, 3665), False, 'import torch\n'), ((3805, 3830), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3828, 3830), False, 'import torch\n'), ((429, 449), 'torch.initial_seed', 'torch.initial_seed', ([], {}), '()\n', (447, 449), False, 'import torch\n'), ((797, 828), 'os.path.join', 'os.path.join', (['root', '"""HIGGS.csv"""'], {}), "(root, 'HIGGS.csv')\n", (809, 828), False, 'import os\n'), ((1626, 1644), 'torch.nn.Linear', 'nn.Linear', (['(28)', '(300)'], {}), '(28, 300)\n', (1635, 1644), True, 'import torch.nn as nn\n'), ((1664, 1683), 'torch.nn.Linear', 'nn.Linear', (['(300)', '(300)'], {}), '(300, 300)\n', (1673, 1683), True, 'import torch.nn as nn\n'), ((1703, 1722), 'torch.nn.Linear', 'nn.Linear', (['(300)', '(300)'], {}), '(300, 300)\n', (1712, 1722), True, 'import torch.nn as nn\n'), ((1742, 1759), 'torch.nn.Linear', 'nn.Linear', (['(300)', '(1)'], {}), '(300, 1)\n', (1751, 1759), True, 'import torch.nn as nn\n'), ((2057, 2074), 'tqdm.tqdm', 'tqdm', (['data_loader'], {}), '(data_loader)\n', (2061, 2074), False, 'from tqdm import tqdm\n'), ((2482, 2499), 'tqdm.tqdm', 'tqdm', (['data_loader'], {}), '(data_loader)\n', (2486, 2499), False, 'from tqdm import tqdm\n'), ((4849, 5005), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'train_set', 'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': 'num_workers', 'pin_memory': '(True)', 'worker_init_fn': 'dataloader_seed_worker'}), '(dataset=train_set, batch_size=batch_size, shuffle=True,\n num_workers=num_workers, pin_memory=True, worker_init_fn=\n dataloader_seed_worker)\n', (4859, 5005), False, 'from torch.utils.data import DataLoader, random_split\n'), ((5101, 5256), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'val_set', 'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': 'num_workers', 'pin_memory': '(True)', 'worker_init_fn': 'dataloader_seed_worker'}), '(dataset=val_set, batch_size=batch_size, shuffle=False,\n num_workers=num_workers, pin_memory=True, worker_init_fn=\n dataloader_seed_worker)\n', (5111, 5256), False, 'from torch.utils.data import DataLoader, random_split\n'), ((5353, 5509), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'test_set', 'batch_size': 'batch_size', 'shuffle': '(False)', 
'num_workers': 'num_workers', 'pin_memory': '(True)', 'worker_init_fn': 'dataloader_seed_worker'}), '(dataset=test_set, batch_size=batch_size, shuffle=False,\n num_workers=num_workers, pin_memory=True, worker_init_fn=\n dataloader_seed_worker)\n', (5363, 5509), False, 'from torch.utils.data import DataLoader, random_split\n'), ((5778, 5801), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (5795, 5801), False, 'import torch\n'), ((6020, 6032), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (6030, 6032), True, 'import torch.nn as nn\n'), ((6242, 6269), 'numpy.zeros', 'np.zeros', (['(n_epochs + 1, 2)'], {}), '((n_epochs + 1, 2))\n', (6250, 6269), True, 'import numpy as np\n'), ((3926, 3947), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (3945, 3947), True, 'import torchvision.transforms as transforms\n'), ((4133, 4150), 'torch.Generator', 'torch.Generator', ([], {}), '()\n', (4148, 4150), False, 'import torch\n')]
|
###############################################################################
#
# Source object color: https://www.pyimagesearch.com/2016/02/15/determining-object-color-with-opencv/
#
# June 28, 2018
#
###############################################################################
# import the necessary packages
from scipy.spatial import distance as dist
from collections import OrderedDict
import numpy as np
import cv2
class ColorLabeler(object):
def __init__(self, color_dictionary = {"red": (255, 0, 0),"green": (0, 255, 0),"blue": (0, 0, 255)}):
"""
Initialize the colors dictionary, containing the color name as the key and the RGB tuple as the value.
		For typical Lego bricks you can use something like:
color_dictionary = {"red": (100, 30, 50),"green": (25, 65, 45),"blue": (24, 43, 105),"orange": (160, 90, 50)}
		These values depend on lighting and camera, so sample your RGB tuples and use them
		to initialize the detector (see the usage sketch after the class definition).
"""
# RGB color dictionary
colors = OrderedDict(color_dictionary)
# allocate memory for the L*a*b* image, then initialize
# the color names list
self.lab = np.zeros((len(colors), 1, 3), dtype="uint8")
self.colorNames = []
# loop over the colors dictionary
for (i, (name, rgb)) in enumerate(colors.items()):
# update the L*a*b* array and the color names list
self.lab[i] = rgb
self.colorNames.append(name)
# convert the L*a*b* array from the RGB color space
# to L*a*b*
#cv2.imshow('self lab RGB', self.lab)
self.lab = cv2.cvtColor(self.lab, cv2.COLOR_RGB2LAB)
#cv2.imshow('self lab', self.lab)
def label(self, image, c):
"""Color detector and labler. Returns string with color name from dictionary."""
# construct a mask for the contour, then compute the
# average L*a*b* value for the masked region
mask = np.zeros(image.shape[:2], dtype="uint8")
cv2.drawContours(mask, [c], -1, 255, -1)
mask = cv2.erode(mask, None, iterations=2)
mean = cv2.mean(image, mask=mask)[:3]
# initialize the minimum distance found thus far
minDist = (np.inf, None)
# loop over the known L*a*b* color values
for (i, row) in enumerate(self.lab):
# compute the distance between the current L*a*b*
# color value and the mean of the image
d = dist.euclidean(row[0], mean)
# if the distance is smaller than the current distance,
# then update the bookkeeping variable
if d < minDist[0]:
minDist = (d, i)
# return the name of the color with the smallest distance
return self.colorNames[minDist[1]]
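# Minimal usage sketch (not part of the original module). It assumes the
# OpenCV 4 cv2.findContours signature and a hypothetical input image
# "bricks.png"; the blur size and threshold value are illustrative only.
if __name__ == "__main__":
    image = cv2.imread("bricks.png")
    blurred = cv2.GaussianBlur(image, (5, 5), 0)
    lab = cv2.cvtColor(blurred, cv2.COLOR_BGR2LAB)
    gray = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    thresh = cv2.threshold(gray, 60, 255, cv2.THRESH_BINARY)[1]
    contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cl = ColorLabeler()
    for c in contours:
        # label() expects the L*a*b* image and a single contour
        print(cl.label(lab, c))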
|
[
"scipy.spatial.distance.euclidean",
"cv2.erode",
"cv2.cvtColor",
"numpy.zeros",
"collections.OrderedDict",
"cv2.drawContours",
"cv2.mean"
] |
[((1053, 1082), 'collections.OrderedDict', 'OrderedDict', (['color_dictionary'], {}), '(color_dictionary)\n', (1064, 1082), False, 'from collections import OrderedDict\n'), ((1654, 1695), 'cv2.cvtColor', 'cv2.cvtColor', (['self.lab', 'cv2.COLOR_RGB2LAB'], {}), '(self.lab, cv2.COLOR_RGB2LAB)\n', (1666, 1695), False, 'import cv2\n'), ((1989, 2029), 'numpy.zeros', 'np.zeros', (['image.shape[:2]'], {'dtype': '"""uint8"""'}), "(image.shape[:2], dtype='uint8')\n", (1997, 2029), True, 'import numpy as np\n'), ((2038, 2078), 'cv2.drawContours', 'cv2.drawContours', (['mask', '[c]', '(-1)', '(255)', '(-1)'], {}), '(mask, [c], -1, 255, -1)\n', (2054, 2078), False, 'import cv2\n'), ((2094, 2129), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(2)'}), '(mask, None, iterations=2)\n', (2103, 2129), False, 'import cv2\n'), ((2145, 2171), 'cv2.mean', 'cv2.mean', (['image'], {'mask': 'mask'}), '(image, mask=mask)\n', (2153, 2171), False, 'import cv2\n'), ((2493, 2521), 'scipy.spatial.distance.euclidean', 'dist.euclidean', (['row[0]', 'mean'], {}), '(row[0], mean)\n', (2507, 2521), True, 'from scipy.spatial import distance as dist\n')]
|
# coding=utf-8
# Copyright Huawei Noah's Ark Lab.
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Collection of input pipelines.
An input pipeline defines how to read and parse data. It produces a tuple
of (features, labels) that can be read by tf.learn estimators.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import abc
import sys
import six
import math
import numpy as np
import tensorflow as tf
from noahnmt.data.input_pipeline import InputPipeline
# from noahnmt.utils import align_utils
from noahnmt.utils import constant_utils
from noahnmt.utils import registry
from noahnmt.layers import common_layers as common_utils
@registry.register_class
class ParallelTextInputPipeline(InputPipeline):
"""An input pipeline that reads two parallel (line-by-line aligned) text
files.
Params:
source_files: An array of file names for the source data.
target_files: An array of file names for the target data. These must
be aligned to the `source_files`.
source_delimiter: A character to split the source text on. Defaults
to " " (space). For character-level training this can be set to the
empty string.
target_delimiter: Same as `source_delimiter` but for the target text.
"""
@staticmethod
def default_params():
params = InputPipeline.default_params()
params.update({
"source_files": "",
"target_files": "",
"align_file": None,
"kd_ref_file": None,
"source_max_len": None,
"target_max_len": None,
"filter_long_sents": True,
"source_delimiter": " ",
"target_delimiter": " ",
"fix_batch": True,
"source_reverse": False,
"shuffle": True,
"pad_to_eight": False,
})
return params
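  # Illustrative example (not from the original code) of the kind of override a
  # training configuration might supply on top of the defaults above; any key
  # that is not listed keeps its default value:
  #   {"source_files": "train.src",
  #    "target_files": "train.tgt",
  #    "source_max_len": 50,
  #    "target_max_len": 50,
  #    "source_reverse": False,
  #    "fix_batch": True}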
def _create_batched_dataset(self, src_tgt_dataset, seed=None, **kwargs):
src_max_len = self.params["source_max_len"]
tgt_max_len = self.params["target_max_len"]
batch_size = self.params["batch_size"]
num_threads = self.params["num_threads"]
output_buffer_size = self.params["output_buffer_size"]
eos = self.params["eos"]
sos = self.params["sos"]
if output_buffer_size is None:
output_buffer_size = 1000 * batch_size * self.params["batch_multiplier"]
if not self.params["fix_batch"]:
# in this case, batch_size is usually a large value
# divide by an approximate average sentence length
output_buffer_size //= 20
# use dict for convenience
src_tgt_dataset = src_tgt_dataset.map(
lambda src, tgt: {"source_tokens":src, "target_tokens": tgt},
num_parallel_calls=num_threads).prefetch(output_buffer_size)
align_file = self.params["align_file"]
# if align_file:
# assert self.mode == tf.estimator.ModeKeys.TRAIN
# align_dataset = align_utils.get_align_dataset(align_file)
# src_tgt_dataset = tf.data.Dataset.zip((src_tgt_dataset, align_dataset))
# src_tgt_dataset = src_tgt_dataset.map(
# lambda dict_, align: dict_.update({
# "align_indices": align[0],
# "align_values": align[1]}) or dict_,
# num_parallel_calls=num_threads).prefetch(output_buffer_size)
# # filter out empty-align lines, only contain </s>-</s>
# src_tgt_dataset = src_tgt_dataset.filter(
# lambda dict_: tf.size(dict_["align_values"]) > 1)
kd_ref_file = self.params["kd_ref_file"]
if kd_ref_file:
kd_dataset = tf.data.TextLineDataset(kd_ref_file)
src_tgt_dataset = tf.data.Dataset.zip((src_tgt_dataset, kd_dataset))
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_, kd: dict_.update({
"kd_ref_tokens": kd}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if self.params["num_shards"] > 1 and self.mode == tf.estimator.ModeKeys.TRAIN:
src_tgt_dataset = src_tgt_dataset.shard(
num_shards=self.params["num_shards"],
index=self.params["shard_index"])
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_tokens": tf.string_split([dict_["source_tokens"]]).values, # src
"target_tokens": tf.string_split([dict_["target_tokens"]]).values, # tgt
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if kd_ref_file:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"kd_ref_tokens": tf.string_split([dict_["kd_ref_tokens"]]).values, # tgt
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
# Filter zero length input sequences.
src_tgt_dataset = src_tgt_dataset.filter(
lambda dict_: tf.logical_and(
tf.size(dict_["source_tokens"]) > 0,
tf.size(dict_["target_tokens"]) > 0))
if kd_ref_file:
src_tgt_dataset = src_tgt_dataset.filter(
lambda dict_: tf.size(dict_["kd_ref_tokens"]) > 0)
if self.mode == tf.estimator.ModeKeys.TRAIN and self.params["shuffle"]:
src_tgt_dataset = src_tgt_dataset.shuffle(
output_buffer_size, seed)
# num_epochs loops before batching to avoid frequent small batches
# after shuffle to avoid repeat sentences in the same batch
src_tgt_dataset = src_tgt_dataset.repeat()
if src_max_len:
if self.params["filter_long_sents"]:
src_tgt_dataset = src_tgt_dataset.filter(
lambda dict_: tf.size(dict_["source_tokens"]) <= src_max_len)
else:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_tokens": dict_["source_tokens"][:src_max_len]
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if tgt_max_len:
if self.params["filter_long_sents"]:
src_tgt_dataset = src_tgt_dataset.filter(
lambda dict_: tf.size(dict_["target_tokens"]) <= tgt_max_len)
if kd_ref_file:
src_tgt_dataset = src_tgt_dataset.filter(
lambda dict_: tf.size(dict_["kd_ref_tokens"]) <= tgt_max_len)
else:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"target_tokens": dict_["target_tokens"][:tgt_max_len]
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if kd_ref_file:
raise NotImplementedError("kd_ref_file")
if self.params["source_reverse"]:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_tokens": tf.reverse(dict_["source_tokens"], axis=[0])
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
# Create a tgt prefixed with <sos> and suffixed with <eos>.
# src suffixed with <eos>
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_tokens": tf.concat((dict_["source_tokens"], [eos]), 0),
"target_tokens": tf.concat(([sos], dict_["target_tokens"], [eos]), 0),
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if self.params["source_sos"]:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_tokens": tf.concat(([sos], dict_["source_tokens"]), 0),
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if kd_ref_file:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"kd_ref_tokens": tf.concat(([sos], dict_["kd_ref_tokens"], [eos]), 0),
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
# Add in the word counts. Subtract one from the target to avoid counting
# the target_input <eos> tag (resp. target_output <sos> tag).
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"source_len": tf.size(dict_["source_tokens"]),
"target_len": tf.size(dict_["target_tokens"])-1
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
if kd_ref_file:
src_tgt_dataset = src_tgt_dataset.map(
lambda dict_: dict_.update({
"kd_ref_len": tf.size(dict_["kd_ref_tokens"])-1
}) or dict_,
num_parallel_calls=num_threads).prefetch(output_buffer_size)
# Bucket by source sequence length (buckets for lengths 0-9, 10-19, ...)
def batching_func(batch_size, x):
# The first three entries are the source and target line rows;
# these have unknown-length vectors. The last two entries are
# the source and target row sizes; these are scalars.
padded_shapes = {
"source_tokens": tf.TensorShape([None]), # src
"target_tokens": tf.TensorShape([None]), # tgt
"source_len": tf.TensorShape([]), # src_len
"target_len": tf.TensorShape([])} # tgt_len
# Pad the source and target sequences with eos tokens.
# (Though notice we don't generally need to do this since
# later on we will be masking out calculations past the true sequence.
padded_values = {
"source_tokens": eos, "target_tokens": eos,
"source_len": 0, "target_len": 0}
if align_file:
padded_shapes.update({
"align_indices": tf.TensorShape([None, 2]),
"align_values": tf.TensorShape([None])}) # align
padded_values.update({
"align_indices": 0,
"align_values": tf.constant(0., dtype=constant_utils.DT_FLOAT())})
if kd_ref_file:
padded_shapes.update({
"kd_ref_tokens": tf.TensorShape([None]),
"kd_ref_len": tf.TensorShape([])}) # align
padded_values.update({
"kd_ref_tokens": eos,
"kd_ref_len": 0})
return x.padded_batch(
batch_size,
padded_shapes=padded_shapes,
padding_values=padded_values)
if self.mode == tf.estimator.ModeKeys.TRAIN:
# Calculate boundaries
min_length = 8
max_length = src_max_len if src_max_len else 1024
if tgt_max_len:
max_length = max(max_length, tgt_max_len)
# boundary for bucketing
x = min_length
boundaries = []
while x < max_length:
boundaries.append(x)
x = max(x + 1, int(x * 1.2))
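      # Illustrative note (not in the original): starting from min_length=8 the
      # loop above yields boundaries 8, 9, 10, 12, 14, 16, 19, 22, 26, 31, 37, ...
      # i.e. bucket widths grow by roughly 20% per step.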
if not self.params["fix_batch"]:
batch_sizes = [max(1, batch_size // length)
for length in boundaries + [max_length]]
if self.params["pad_to_eight"]:
batch_sizes = [max(1, x//8*8) for x in batch_sizes]
batch_sizes = [b * self.params["batch_multiplier"] for b in batch_sizes]
else:
batch_sizes = batch_size * self.params["batch_multiplier"]
def key_func(dict_):
# Bucket sentence pairs by the length of their source sentence and target
# sentence.
src_len = dict_["source_len"]
tgt_len = dict_["target_len"]
seq_len = tf.maximum(src_len, tgt_len)
buckets_min = [np.iinfo(np.int32).min] + boundaries
buckets_max = boundaries + [np.iinfo(np.int32).max]
conditions_c = tf.logical_and(
tf.less_equal(buckets_min, seq_len),
tf.less(seq_len, buckets_max))
bucket_id = tf.reduce_min(tf.where(conditions_c))
return bucket_id
def reduce_func(bucket_id, windowed_data):
return batching_func(batch_size_func(bucket_id), windowed_data)
def window_size_func(bucket_id):
return batch_size_func(bucket_id)
# batch_size for different bucket_id
# Used when batch_by_num_words is enabled
# In this case, the given batch_size represents the num of words in each batch
#
#bucket_boundaries = [max(1, i * bucket_width) for i in range(num_buckets)]
def batch_size_func(bucket_id):
if isinstance(batch_sizes, list):
batch_sizes_tensor = tf.constant(batch_sizes, dtype=tf.int64)
return batch_sizes_tensor[bucket_id]
return batch_sizes
batched_dataset = src_tgt_dataset.apply(
tf.contrib.data.group_by_window(
key_func=key_func, reduce_func=reduce_func,
window_size=None, window_size_func=window_size_func)
)
else:
batched_dataset = batching_func(
batch_size * self.params["batch_multiplier"],
src_tgt_dataset)
def _pad_to_eight(tensor, pad_more=0):
axis=1
pad_value = eos
shape = common_utils.shape_list(tensor)
max_len = shape[axis]
extra_len = tf.mod(8 - tf.mod(max_len, 8), 8)
extra_len += pad_more
paddings = [[0,0]] * len(shape)
paddings[axis] = [0, extra_len]
paddings = tf.convert_to_tensor(paddings)
tensor = tf.pad(
tensor, paddings,
constant_values=pad_value)
return tensor
if self.params["pad_to_eight"]:
batched_dataset = batched_dataset.map(
lambda dict_: dict_.update({
"source_tokens": _pad_to_eight(dict_["source_tokens"]),
"target_tokens": _pad_to_eight(dict_["target_tokens"], pad_more=1),
}) or dict_,
num_parallel_calls=8).prefetch(1000)
return batched_dataset
def read_data(self, seed=None, **kwargs):
src_files = self.params["source_files"]
tgt_files = self.params["target_files"]
# Dataset will be moved from contrib to tensorflow core
# So we keep using it
src_dataset = tf.data.TextLineDataset(src_files.split(","))
tgt_dataset = tf.data.TextLineDataset(tgt_files.split(","))
src_tgt_dataset = tf.data.Dataset.zip((src_dataset, tgt_dataset))
batched_dataset = self._create_batched_dataset(
src_tgt_dataset, seed=seed, **kwargs)
# prefetch 1000 batches
batched_dataset = batched_dataset.prefetch(1000)
batched_iter = batched_dataset.make_one_shot_iterator()
# return features
features = batched_iter.get_next()
return features
|
[
"tensorflow.maximum",
"numpy.iinfo",
"tensorflow.string_split",
"tensorflow.contrib.data.group_by_window",
"tensorflow.size",
"tensorflow.less",
"tensorflow.pad",
"tensorflow.TensorShape",
"tensorflow.less_equal",
"tensorflow.concat",
"tensorflow.reverse",
"tensorflow.constant",
"noahnmt.data.input_pipeline.InputPipeline.default_params",
"tensorflow.data.Dataset.zip",
"tensorflow.where",
"tensorflow.mod",
"noahnmt.utils.constant_utils.DT_FLOAT",
"noahnmt.layers.common_layers.shape_list",
"tensorflow.convert_to_tensor",
"tensorflow.data.TextLineDataset"
] |
[((2621, 2651), 'noahnmt.data.input_pipeline.InputPipeline.default_params', 'InputPipeline.default_params', ([], {}), '()\n', (2649, 2651), False, 'from noahnmt.data.input_pipeline import InputPipeline\n'), ((15015, 15062), 'tensorflow.data.Dataset.zip', 'tf.data.Dataset.zip', (['(src_dataset, tgt_dataset)'], {}), '((src_dataset, tgt_dataset))\n', (15034, 15062), True, 'import tensorflow as tf\n'), ((4786, 4822), 'tensorflow.data.TextLineDataset', 'tf.data.TextLineDataset', (['kd_ref_file'], {}), '(kd_ref_file)\n', (4809, 4822), True, 'import tensorflow as tf\n'), ((4847, 4897), 'tensorflow.data.Dataset.zip', 'tf.data.Dataset.zip', (['(src_tgt_dataset, kd_dataset)'], {}), '((src_tgt_dataset, kd_dataset))\n', (4866, 4897), True, 'import tensorflow as tf\n'), ((13893, 13924), 'noahnmt.layers.common_layers.shape_list', 'common_utils.shape_list', (['tensor'], {}), '(tensor)\n', (13916, 13924), True, 'from noahnmt.layers import common_layers as common_utils\n'), ((14127, 14157), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['paddings'], {}), '(paddings)\n', (14147, 14157), True, 'import tensorflow as tf\n'), ((14174, 14225), 'tensorflow.pad', 'tf.pad', (['tensor', 'paddings'], {'constant_values': 'pad_value'}), '(tensor, paddings, constant_values=pad_value)\n', (14180, 14225), True, 'import tensorflow as tf\n'), ((10071, 10093), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None]'], {}), '([None])\n', (10085, 10093), True, 'import tensorflow as tf\n'), ((10129, 10151), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None]'], {}), '([None])\n', (10143, 10151), True, 'import tensorflow as tf\n'), ((10184, 10202), 'tensorflow.TensorShape', 'tf.TensorShape', (['[]'], {}), '([])\n', (10198, 10202), True, 'import tensorflow as tf\n'), ((10239, 10257), 'tensorflow.TensorShape', 'tf.TensorShape', (['[]'], {}), '([])\n', (10253, 10257), True, 'import tensorflow as tf\n'), ((12363, 12391), 'tensorflow.maximum', 'tf.maximum', (['src_len', 'tgt_len'], {}), '(src_len, tgt_len)\n', (12373, 12391), True, 'import tensorflow as tf\n'), ((13488, 13620), 'tensorflow.contrib.data.group_by_window', 'tf.contrib.data.group_by_window', ([], {'key_func': 'key_func', 'reduce_func': 'reduce_func', 'window_size': 'None', 'window_size_func': 'window_size_func'}), '(key_func=key_func, reduce_func=reduce_func,\n window_size=None, window_size_func=window_size_func)\n', (13519, 13620), True, 'import tensorflow as tf\n'), ((12563, 12598), 'tensorflow.less_equal', 'tf.less_equal', (['buckets_min', 'seq_len'], {}), '(buckets_min, seq_len)\n', (12576, 12598), True, 'import tensorflow as tf\n'), ((12612, 12641), 'tensorflow.less', 'tf.less', (['seq_len', 'buckets_max'], {}), '(seq_len, buckets_max)\n', (12619, 12641), True, 'import tensorflow as tf\n'), ((12677, 12699), 'tensorflow.where', 'tf.where', (['conditions_c'], {}), '(conditions_c)\n', (12685, 12699), True, 'import tensorflow as tf\n'), ((13315, 13355), 'tensorflow.constant', 'tf.constant', (['batch_sizes'], {'dtype': 'tf.int64'}), '(batch_sizes, dtype=tf.int64)\n', (13326, 13355), True, 'import tensorflow as tf\n'), ((13982, 14000), 'tensorflow.mod', 'tf.mod', (['max_len', '(8)'], {}), '(max_len, 8)\n', (13988, 14000), True, 'import tensorflow as tf\n'), ((6073, 6104), 'tensorflow.size', 'tf.size', (["dict_['source_tokens']"], {}), "(dict_['source_tokens'])\n", (6080, 6104), True, 'import tensorflow as tf\n'), ((6121, 6152), 'tensorflow.size', 'tf.size', (["dict_['target_tokens']"], {}), "(dict_['target_tokens'])\n", (6128, 6152), True, 'import tensorflow as 
tf\n'), ((6254, 6285), 'tensorflow.size', 'tf.size', (["dict_['kd_ref_tokens']"], {}), "(dict_['kd_ref_tokens'])\n", (6261, 6285), True, 'import tensorflow as tf\n'), ((10679, 10704), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None, 2]'], {}), '([None, 2])\n', (10693, 10704), True, 'import tensorflow as tf\n'), ((10735, 10757), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None]'], {}), '([None])\n', (10749, 10757), True, 'import tensorflow as tf\n'), ((11000, 11022), 'tensorflow.TensorShape', 'tf.TensorShape', (['[None]'], {}), '([None])\n', (11014, 11022), True, 'import tensorflow as tf\n'), ((11051, 11069), 'tensorflow.TensorShape', 'tf.TensorShape', (['[]'], {}), '([])\n', (11065, 11069), True, 'import tensorflow as tf\n'), ((6786, 6817), 'tensorflow.size', 'tf.size', (["dict_['source_tokens']"], {}), "(dict_['source_tokens'])\n", (6793, 6817), True, 'import tensorflow as tf\n'), ((7231, 7262), 'tensorflow.size', 'tf.size', (["dict_['target_tokens']"], {}), "(dict_['target_tokens'])\n", (7238, 7262), True, 'import tensorflow as tf\n'), ((12415, 12433), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (12423, 12433), True, 'import numpy as np\n'), ((12488, 12506), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (12496, 12506), True, 'import numpy as np\n'), ((7381, 7412), 'tensorflow.size', 'tf.size', (["dict_['kd_ref_tokens']"], {}), "(dict_['kd_ref_tokens'])\n", (7388, 7412), True, 'import tensorflow as tf\n'), ((10882, 10907), 'noahnmt.utils.constant_utils.DT_FLOAT', 'constant_utils.DT_FLOAT', ([], {}), '()\n', (10905, 10907), False, 'from noahnmt.utils import constant_utils\n'), ((8249, 8294), 'tensorflow.concat', 'tf.concat', (["(dict_['source_tokens'], [eos])", '(0)'], {}), "((dict_['source_tokens'], [eos]), 0)\n", (8258, 8294), True, 'import tensorflow as tf\n'), ((8323, 8375), 'tensorflow.concat', 'tf.concat', (["([sos], dict_['target_tokens'], [eos])", '(0)'], {}), "(([sos], dict_['target_tokens'], [eos]), 0)\n", (8332, 8375), True, 'import tensorflow as tf\n'), ((9275, 9306), 'tensorflow.size', 'tf.size', (["dict_['source_tokens']"], {}), "(dict_['source_tokens'])\n", (9282, 9306), True, 'import tensorflow as tf\n'), ((5435, 5476), 'tensorflow.string_split', 'tf.string_split', (["[dict_['source_tokens']]"], {}), "([dict_['source_tokens']])\n", (5450, 5476), True, 'import tensorflow as tf\n'), ((5516, 5557), 'tensorflow.string_split', 'tf.string_split', (["[dict_['target_tokens']]"], {}), "([dict_['target_tokens']])\n", (5531, 5557), True, 'import tensorflow as tf\n'), ((7914, 7958), 'tensorflow.reverse', 'tf.reverse', (["dict_['source_tokens']"], {'axis': '[0]'}), "(dict_['source_tokens'], axis=[0])\n", (7924, 7958), True, 'import tensorflow as tf\n'), ((8613, 8658), 'tensorflow.concat', 'tf.concat', (["([sos], dict_['source_tokens'])", '(0)'], {}), "(([sos], dict_['source_tokens']), 0)\n", (8622, 8658), True, 'import tensorflow as tf\n'), ((8886, 8938), 'tensorflow.concat', 'tf.concat', (["([sos], dict_['kd_ref_tokens'], [eos])", '(0)'], {}), "(([sos], dict_['kd_ref_tokens'], [eos]), 0)\n", (8895, 8938), True, 'import tensorflow as tf\n'), ((9331, 9362), 'tensorflow.size', 'tf.size', (["dict_['target_tokens']"], {}), "(dict_['target_tokens'])\n", (9338, 9362), True, 'import tensorflow as tf\n'), ((5792, 5833), 'tensorflow.string_split', 'tf.string_split', (["[dict_['kd_ref_tokens']]"], {}), "([dict_['kd_ref_tokens']])\n", (5807, 5833), True, 'import tensorflow as tf\n'), ((9582, 9613), 'tensorflow.size', 'tf.size', 
(["dict_['kd_ref_tokens']"], {}), "(dict_['kd_ref_tokens'])\n", (9589, 9613), True, 'import tensorflow as tf\n')]
|
"""
Evaluate the ROC/AUC curve:
predicted scores are loaded from .npy files,
ground truth comes from .mat / frame-mask .npy annotations
"""
import os
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import roc_auc_score,roc_curve,auc
from net.utils.parser import load_config,parse_args
import net.utils.logging_tool as logging
from sklearn import metrics
from net.utils.load_ground_true import load_shanghaitech
import scipy.io as scio
logger=logging.get_logger(__name__)
def save_fpr_tpr(fpr, tpr,mat_name,roc_value):
"""
    Save the ROC curve (FPR/TPR arrays) to a .mat file.
    :param fpr: false positive rates
    :param tpr: true positive rates
    :param mat_name: base name used to build the output .mat path
    :param roc_value: AUC value appended to the output file name
:return:
"""
fpr=np.expand_dims(fpr,axis=1)
tpr=np.expand_dims(tpr,axis=1)
mat_name=mat_name.split("/")[-1]
mat_new=r"F:\SPL_Save_Folder\SRF\SH\roc_mat/"+mat_name+str(roc_value)[2:6]+".mat"
scio.savemat(mat_new, {'X': fpr, "Y": tpr, "description ": "SH ROC Cruve"+mat_name})
# plt.title("roc curve")
# plt.plot(fpr, tpr,)
# plt.show()
def remove_edge(plt):
"""
    Hide all axis spines for a cleaner visualization.
    :param plt: matplotlib.pyplot module (a new figure/axes pair is created)
:return:
"""
fig, ax = plt.subplots()
ax.spines["top"].set_visible(False)
ax.spines["bottom"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.spines["left"].set_visible(False)
def show_ground_true(y,score):
    # ax = plt.gca()  # get the current axes
    # ax.xaxis.set_ticks_position('top')  # move the x-axis ticks to the top
    # ax.invert_yaxis()  # invert the y-axis
plt.xlim((0, len(y)))
plt.ylim((0, 1.01))
x=np.arange(len(y))
plt.plot(x, score,"r")
plt.bar(x,y,width=1)
plt.show()
def show_score_ground_true(y_score,y_label,title_name,norm_mode,cfg):
plt.cla()
plt.title(title_name)
plt.ylim((0, 1))
x = np.arange(len(y_score))
plt.plot(x, y_score,"r-",label="pred_score")
plt.plot(x,y_label,"g-",label="ground_true")
    plt.legend()  # add legend
# save folder
save_folder=os.path.join(
cfg.TEST.SAVE_NPY_PATH,norm_mode
)
if not os.path.exists(save_folder):
os.makedirs(save_folder)
plt.savefig(os.path.join(
save_folder,title_name+".png"
))
# plt.show()
def show_line_one_video(y_score):
x=np.arange(len(y_score))
plt.plot(x,y_score)
plt.show()
def show_pred_score_and_ground_true(y_score,y_label):
x = np.arange(len(y_score))
plt.plot(x, y_score,"r--")
plt.plot(x,y_label,"g--")
plt.show()
def roc_draw(y_pred_score,y_label):
"""
    Plot the ROC curve.
    :param y_pred_score: predicted scores
    :param y_label: ground-truth labels
:return:
"""
fpr, tpr, thresholds =roc_curve(y_label, y_pred_score, pos_label=None, sample_weight=None,
drop_intermediate=True)
plt.title("roc curve")
plt.plot(fpr, tpr, marker='o')
plt.show()
def cal_auc(y_pred,y_label,cfg):
"""
calculate auc
:param y_pred:
:param y_label:
:return:
"""
assert len(y_pred)==len(y_label)
# auc=roc_auc_score(y_label,y_pred)
fpr, tpr, thresholds = metrics.roc_curve(y_label, y_pred)
# metrics.auc(fpr, tpr)
# plt x=fpr,y=tpr
rec_auc = auc(fpr, tpr)
plt.title("shanghai tech ")
plt.plot(fpr, tpr)
plt.show()
save_fpr_tpr(fpr,tpr,cfg.OUTPUT_DIR,rec_auc)
# auc=roc_auc_score(y_label,y_pred)
return rec_auc
def SH_GROUND_TRUE_ANNO(anao_txt):
"""
    Load the annotation lines from the test annotation file, e.g.
    D:\dataset\ShanghaiTech_new_split/SH_Test_Annotate.txt
    :param anao_txt: path to the annotation txt file
    :return: list of stripped annotation lines
"""
r_lines=[]
# total_length=0
with open(anao_txt,"r") as f:
lines=f.readlines()
for line in lines:
line=line.strip()
# total_length+=(int(line.split(" ")[-1])//16*16)
r_lines.append(line)
return r_lines
def sh_label_pred_score_unmerged(label_line,pred_array,cfg):
"""
    Expand per-feature (16-frame clip) prediction scores to per-frame scores
    and build the matching frame-level ground truth.
:param label_line:
:param pred_array:
:return:
"""
#01_001 normal 764
video_name,abnormal_class, T_length =label_line.split(" ")
T_length=int(T_length)
pred_scores=[]
# make score to T_length each feature contain 16 non-overlap frames
feature_num=(T_length)//16
for item in pred_array:
_item=[item]*16
pred_scores+=_item
    # ground truth
if abnormal_class in ["Normal"]:
ground_ture=[0]*feature_num*16
elif abnormal_class in ["Abnormal"]:
ground_ture=load_one_tech_test_npy_anno(video_name).tolist()
ground_ture=ground_ture[:feature_num*16]
    assert len(pred_scores)==len(ground_ture), "mismatch in length of pred score and ground truth"
# draw line to visual
#show_score_ground_true(pred_scores,ground_ture,abnormal_class+"_"+video_name,"all_norm",cfg)
return pred_scores,ground_ture
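# Worked example (not part of the original code): for an annotation line with
# T_length = 50 the function keeps feature_num = 50 // 16 = 3 features, so a
# pred_array of per-feature scores [0.1, 0.9, 0.2] expands to 3 * 16 = 48
# per-frame scores ([0.1] * 16 + [0.9] * 16 + [0.2] * 16), and the ground truth
# is likewise cut to the first 48 frames so both lists satisfy the assert above.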
def sh_label_pred_score_merged(label_line,pred_array,cfg):
"""
    Expand 32 per-segment prediction scores to per-frame scores and build the
    matching frame-level ground truth (1 for abnormal, 0 for normal).
:param label_line:
:param pred_array:
:return:
"""
#Abuse028_x264.mp4 Abuse 165 240 -1 -1 1412
video_name,abnormal_class,T_length =label_line.split(" ")
T_length=int(T_length)
pred_scores=[0]*T_length
    # ground truth
if abnormal_class in ["Normal"]:
ground_ture = [0] * T_length
elif abnormal_class in ["Abnormal"]:
ground_ture = load_one_tech_test_npy_anno(video_name).tolist()
segments_len = T_length // 32
for i in range(32):
segment_start_frame = int(i * segments_len)
segment_end_frame = int((i + 1) * segments_len)
pred_scores[segment_start_frame:segment_end_frame] = [pred_array[i]]*(segment_end_frame-segment_start_frame)
# pred_scores[int(32 * segments_len):] = [pred_array[-1]] * (len(pred_scores[int(32 * segments_len):]))
    assert len(pred_scores)==len(ground_ture), "mismatch in length of pred score and ground truth"
# draw line to visual
#show_score_ground_true(pred_scores,ground_ture,abnormal_class+"_"+video_name,"no_norm",cfg)
return pred_scores,ground_ture
def load_one_tech_test_npy_anno(video_name):
# frame mask
# and pixel mask
frame_mask_root=r"D:\AnomalyDataset\shanghaitech\testing\test_frame_mask"
gt=np.load(
os.path.join(
frame_mask_root,video_name+".npy"
)
)
return gt
def get_label_and_score(ano_line,save_folder,cfg):
y_preds = []
y_labels = []
for line in ano_line:
video_name, abnormal_class, T_length = line.split(" ")
# load npy
pred_array = np.load(
os.path.join(
save_folder, video_name + ".npy"
)
)
y_pred, y_label = sh_label_pred_score_unmerged(line, pred_array,cfg)
y_preds += y_pred
y_labels += y_label
# y_preds=norm_min_max(np.array(y_preds)).tolist()
# y_preds=[max(0,x-0.225) for x in y_preds]
return y_preds, y_labels
def eval_auc_roc(cfg):
"""
    Load the predicted score list for every test video and the frame-level
    labels in {0, 1} (1 for abnormal frames, 0 for normal frames), then
    compute the ROC curve and AUC.
:param cfg:
:return:
"""
logging.setup_logging(cfg.OUTPUT_DIR,cfg.AUC_LOGFILE_NAME)
ano_line=SH_GROUND_TRUE_ANNO(
r"E:\datasets\shanghaitech_C3D_Feature/SH_Test_Annotate.txt"
)
y_pred_score,y_label=get_label_and_score(
ano_line,os.path.join(cfg.TEST.SAVE_NPY_PATH,"PRED_TEST_SCORE"),cfg
)
# y_pred_score=load_npy_tech(cfg.TEST.SAVE_NPY_PATH,cfg.TEST.PATH,cfg)
# y_label=load_shanghaitech(cfg.TECH.FRAME_MAT_FOLDER)
auc_values=[]
assert len(y_pred_score)==len(y_label) ,"len{} and len{}not match".format("y_pred_score","y_label")
# logger.info("auc for each video and all video ")
auc_value = cal_auc(y_pred_score, y_label,cfg)
# # roc_draw(total_y_pred, total_y_label)
# logger.info("total auc value:{}".format(auc_value))
print("total auc value:{}".format(auc_value))
if __name__=="__main__":
"""
    Load the predicted scores (scores close to 0 indicate an anomaly),
    load the ground truth, and draw the ROC curve.
"""
args=parse_args()
cfg=load_config(args)
eval_auc_roc(cfg)
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.bar",
"os.path.join",
"os.path.exists",
"net.utils.parser.load_config",
"matplotlib.pyplot.cla",
"net.utils.logging_tool.get_logger",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show",
"net.utils.logging_tool.setup_logging",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"os.makedirs",
"matplotlib.pyplot.plot",
"sklearn.metrics.roc_curve",
"scipy.io.savemat",
"numpy.expand_dims",
"sklearn.metrics.auc",
"net.utils.parser.parse_args"
] |
[((387, 415), 'net.utils.logging_tool.get_logger', 'logging.get_logger', (['__name__'], {}), '(__name__)\n', (405, 415), True, 'import net.utils.logging_tool as logging\n'), ((554, 581), 'numpy.expand_dims', 'np.expand_dims', (['fpr'], {'axis': '(1)'}), '(fpr, axis=1)\n', (568, 581), True, 'import numpy as np\n'), ((589, 616), 'numpy.expand_dims', 'np.expand_dims', (['tpr'], {'axis': '(1)'}), '(tpr, axis=1)\n', (603, 616), True, 'import numpy as np\n'), ((746, 836), 'scipy.io.savemat', 'scio.savemat', (['mat_new', "{'X': fpr, 'Y': tpr, 'description ': 'SH ROC Cruve' + mat_name}"], {}), "(mat_new, {'X': fpr, 'Y': tpr, 'description ': 'SH ROC Cruve' +\n mat_name})\n", (758, 836), True, 'import scipy.io as scio\n'), ((1021, 1035), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1033, 1035), True, 'import matplotlib.pyplot as plt\n'), ((1388, 1407), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0, 1.01)'], {}), '((0, 1.01))\n', (1396, 1407), True, 'import matplotlib.pyplot as plt\n'), ((1436, 1459), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'score', '"""r"""'], {}), "(x, score, 'r')\n", (1444, 1459), True, 'import matplotlib.pyplot as plt\n'), ((1463, 1485), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'y'], {'width': '(1)'}), '(x, y, width=1)\n', (1470, 1485), True, 'import matplotlib.pyplot as plt\n'), ((1489, 1499), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1497, 1499), True, 'import matplotlib.pyplot as plt\n'), ((1575, 1584), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (1582, 1584), True, 'import matplotlib.pyplot as plt\n'), ((1589, 1610), 'matplotlib.pyplot.title', 'plt.title', (['title_name'], {}), '(title_name)\n', (1598, 1610), True, 'import matplotlib.pyplot as plt\n'), ((1615, 1631), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0, 1)'], {}), '((0, 1))\n', (1623, 1631), True, 'import matplotlib.pyplot as plt\n'), ((1668, 1714), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_score', '"""r-"""'], {'label': '"""pred_score"""'}), "(x, y_score, 'r-', label='pred_score')\n", (1676, 1714), True, 'import matplotlib.pyplot as plt\n'), ((1717, 1764), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_label', '"""g-"""'], {'label': '"""ground_true"""'}), "(x, y_label, 'g-', label='ground_true')\n", (1725, 1764), True, 'import matplotlib.pyplot as plt\n'), ((1766, 1778), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1776, 1778), True, 'import matplotlib.pyplot as plt\n'), ((1821, 1868), 'os.path.join', 'os.path.join', (['cfg.TEST.SAVE_NPY_PATH', 'norm_mode'], {}), '(cfg.TEST.SAVE_NPY_PATH, norm_mode)\n', (1833, 1868), False, 'import os\n'), ((2116, 2136), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_score'], {}), '(x, y_score)\n', (2124, 2136), True, 'import matplotlib.pyplot as plt\n'), ((2140, 2150), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2148, 2150), True, 'import matplotlib.pyplot as plt\n'), ((2242, 2269), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_score', '"""r--"""'], {}), "(x, y_score, 'r--')\n", (2250, 2269), True, 'import matplotlib.pyplot as plt\n'), ((2273, 2300), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_label', '"""g--"""'], {}), "(x, y_label, 'g--')\n", (2281, 2300), True, 'import matplotlib.pyplot as plt\n'), ((2303, 2313), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2311, 2313), True, 'import matplotlib.pyplot as plt\n'), ((2458, 2554), 'sklearn.metrics.roc_curve', 'roc_curve', (['y_label', 'y_pred_score'], {'pos_label': 'None', 'sample_weight': 'None', 'drop_intermediate': '(True)'}), 
'(y_label, y_pred_score, pos_label=None, sample_weight=None,\n drop_intermediate=True)\n', (2467, 2554), False, 'from sklearn.metrics import roc_auc_score, roc_curve, auc\n'), ((2587, 2609), 'matplotlib.pyplot.title', 'plt.title', (['"""roc curve"""'], {}), "('roc curve')\n", (2596, 2609), True, 'import matplotlib.pyplot as plt\n'), ((2614, 2644), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {'marker': '"""o"""'}), "(fpr, tpr, marker='o')\n", (2622, 2644), True, 'import matplotlib.pyplot as plt\n'), ((2649, 2659), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2657, 2659), True, 'import matplotlib.pyplot as plt\n'), ((2885, 2919), 'sklearn.metrics.roc_curve', 'metrics.roc_curve', (['y_label', 'y_pred'], {}), '(y_label, y_pred)\n', (2902, 2919), False, 'from sklearn import metrics\n'), ((2986, 2999), 'sklearn.metrics.auc', 'auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (2989, 2999), False, 'from sklearn.metrics import roc_auc_score, roc_curve, auc\n'), ((3006, 3033), 'matplotlib.pyplot.title', 'plt.title', (['"""shanghai tech """'], {}), "('shanghai tech ')\n", (3015, 3033), True, 'import matplotlib.pyplot as plt\n'), ((3038, 3056), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (3046, 3056), True, 'import matplotlib.pyplot as plt\n'), ((3061, 3071), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3069, 3071), True, 'import matplotlib.pyplot as plt\n'), ((6977, 7036), 'net.utils.logging_tool.setup_logging', 'logging.setup_logging', (['cfg.OUTPUT_DIR', 'cfg.AUC_LOGFILE_NAME'], {}), '(cfg.OUTPUT_DIR, cfg.AUC_LOGFILE_NAME)\n', (6998, 7036), True, 'import net.utils.logging_tool as logging\n'), ((7940, 7952), 'net.utils.parser.parse_args', 'parse_args', ([], {}), '()\n', (7950, 7952), False, 'from net.utils.parser import load_config, parse_args\n'), ((7961, 7978), 'net.utils.parser.load_config', 'load_config', (['args'], {}), '(args)\n', (7972, 7978), False, 'from net.utils.parser import load_config, parse_args\n'), ((1893, 1920), 'os.path.exists', 'os.path.exists', (['save_folder'], {}), '(save_folder)\n', (1907, 1920), False, 'import os\n'), ((1930, 1954), 'os.makedirs', 'os.makedirs', (['save_folder'], {}), '(save_folder)\n', (1941, 1954), False, 'import os\n'), ((1971, 2017), 'os.path.join', 'os.path.join', (['save_folder', "(title_name + '.png')"], {}), "(save_folder, title_name + '.png')\n", (1983, 2017), False, 'import os\n'), ((6112, 6162), 'os.path.join', 'os.path.join', (['frame_mask_root', "(video_name + '.npy')"], {}), "(frame_mask_root, video_name + '.npy')\n", (6124, 6162), False, 'import os\n'), ((7209, 7264), 'os.path.join', 'os.path.join', (['cfg.TEST.SAVE_NPY_PATH', '"""PRED_TEST_SCORE"""'], {}), "(cfg.TEST.SAVE_NPY_PATH, 'PRED_TEST_SCORE')\n", (7221, 7264), False, 'import os\n'), ((6441, 6487), 'os.path.join', 'os.path.join', (['save_folder', "(video_name + '.npy')"], {}), "(save_folder, video_name + '.npy')\n", (6453, 6487), False, 'import os\n')]
|
import numpy as np
class WordEmbeddings(object):
def __init__(self, size=300, n_words=10000):
self.size = size
self.n_words = n_words
width = 0.5 / self.size
        self.embeddings = np.random.uniform(low=-width, high=width, size=(n_words, size))  # np.random.uniform takes low/high, not min/max
|
[
"numpy.random.uniform"
] |
[((215, 277), 'numpy.random.uniform', 'np.random.uniform', ([], {'min': '(-width)', 'max': 'width', 'size': '(n_words, size)'}), '(min=-width, max=width, size=(n_words, size))\n', (232, 277), True, 'import numpy as np\n')]
|
import os
import json
import numpy as np
from scipy import sparse
from scipy.sparse import csr_matrix
try:
import tensorflow as tf
except ModuleNotFoundError:
tf = None
class Backend:
def __init__(self, dirname):
self.dirname = dirname
def save(self, obj):
# store obj
raise NotImplementedError()
def load(self):
# return saved obj
raise NotImplementedError()
class TFGraph(Backend):
"""
Tensorflow knows better how to name your files:
- path.data-00000-of-00001 - weights
- path.index - ??
- path.meta - graph
"""
def save(self, session_graph):
path = os.path.join(self.dirname, "graph")
assert not os.path.exists(path + ".data-00000-of-00001")
assert not os.path.exists(path + ".index")
assert not os.path.exists(path + ".meta")
session, graph = session_graph
with graph.as_default():
saver = tf.train.Saver(
var_list=tf.get_collection(
tf.GraphKeys.GLOBAL_VARIABLES),
max_to_keep=1,
keep_checkpoint_every_n_hours=437437437, # wait for it
sharded=False) # doesn't help, eh...
saver.save(
session,
save_path=path,
meta_graph_suffix="meta",
write_meta_graph=True,
write_state=False)
def load(self):
path = os.path.join(self.dirname, "graph")
assert os.path.exists(path + ".data-00000-of-00001")
assert os.path.exists(path + ".index")
assert os.path.exists(path + ".meta")
graph = tf.Graph()
with graph.as_default():
session = tf.Session(graph=graph)
saver = tf.train.import_meta_graph(path + ".meta")
saver.restore(session, path)
return (session, graph)
class NumpyNDArray(Backend):
def save(self, arr):
np.save(os.path.join(self.dirname, "data.npy"), arr)
def load(self):
_path = os.path.join(self.dirname, "data.npy")
return np.load(
os.path.join(self.dirname, "data.npy"),
mmap_mode='r',
allow_pickle=False,
)
class NumpyVarStr(Backend):
@staticmethod
def save_varstr_to_file(path, arr):
assert len(arr.shape) == 1
assert np.vectorize(lambda x: '\n' not in x, otypes=[np.bool])(arr).all()
s = '\n'.join(list(arr)) + '\n'
with open(path, 'w') as f_out:
f_out.write(s)
@staticmethod
def load_varstr_from_file(path):
with open(path, 'r') as f_in:
content = f_in.read()
assert content[-1] == '\n'
if content == '\n':
return np.array([], dtype=np.object)
else:
return np.array(content[:-1].split('\n'), dtype=np.object)
def save(self, arr):
NumpyVarStr.save_varstr_to_file(
os.path.join(self.dirname, "data.txt"),
arr
)
def load(self):
return NumpyVarStr.load_varstr_from_file(
os.path.join(self.dirname, "data.txt"))
class SciPyCSR(Backend):
@staticmethod
def save_csr_to_file(path, arr):
assert path.endswith(".npz") # because scipy says so
sparse.save_npz(path, arr, compressed=True)
@staticmethod
def load_csr_from_file(path):
assert path.endswith(".npz") # because scipy says so
return sparse.load_npz(path)
def save(self, arr):
SciPyCSR.save_csr_to_file(
os.path.join(self.dirname, "data.npz"),
arr
)
def load(self):
return SciPyCSR.load_csr_from_file(
os.path.join(self.dirname, "data.npz"))
class Basic(Backend):
def save(self, obj):
with open(os.path.join(self.dirname, "data.json"), 'x') as f_out:
json.dump(obj, f_out)
def load(self):
with open(os.path.join(self.dirname, "data.json"), 'r') as f_in:
return json.load(f_in)
def backend_by_object(obj):
if isinstance(obj, (int, bool, float, str)) or obj is None:
return Basic
elif isinstance(obj, tuple):
if tf is not None and isinstance(obj[1], tf.Graph):
return TFGraph
else:
return None
elif isinstance(obj, csr_matrix):
return SciPyCSR
elif isinstance(obj, np.ndarray):
if obj.dtype == np.object:
if np.vectorize(
lambda x: isinstance(x, str),
otypes=[np.bool])(obj).all():
return NumpyVarStr
else:
return None
else:
return NumpyNDArray
else:
return None
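# Minimal usage sketch (not part of the original module); the directory name is
# hypothetical and is created here because each Backend writes into the
# directory it is given.
if __name__ == "__main__":
    demo_dir = "/tmp/backend_demo"
    os.makedirs(demo_dir, exist_ok=True)
    arr = np.arange(12, dtype=np.float32).reshape(3, 4)
    backend_cls = backend_by_object(arr)  # resolves to NumpyNDArray
    backend = backend_cls(demo_dir)
    backend.save(arr)
    print(backend.load().shape)  # (3, 4)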
|
[
"json.dump",
"json.load",
"numpy.vectorize",
"tensorflow.train.import_meta_graph",
"scipy.sparse.load_npz",
"tensorflow.get_collection",
"os.path.exists",
"tensorflow.Session",
"numpy.array",
"scipy.sparse.save_npz",
"tensorflow.Graph",
"os.path.join"
] |
[((653, 688), 'os.path.join', 'os.path.join', (['self.dirname', '"""graph"""'], {}), "(self.dirname, 'graph')\n", (665, 688), False, 'import os\n'), ((1446, 1481), 'os.path.join', 'os.path.join', (['self.dirname', '"""graph"""'], {}), "(self.dirname, 'graph')\n", (1458, 1481), False, 'import os\n'), ((1497, 1542), 'os.path.exists', 'os.path.exists', (["(path + '.data-00000-of-00001')"], {}), "(path + '.data-00000-of-00001')\n", (1511, 1542), False, 'import os\n'), ((1558, 1589), 'os.path.exists', 'os.path.exists', (["(path + '.index')"], {}), "(path + '.index')\n", (1572, 1589), False, 'import os\n'), ((1605, 1635), 'os.path.exists', 'os.path.exists', (["(path + '.meta')"], {}), "(path + '.meta')\n", (1619, 1635), False, 'import os\n'), ((1652, 1662), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (1660, 1662), True, 'import tensorflow as tf\n'), ((2030, 2068), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.npy"""'], {}), "(self.dirname, 'data.npy')\n", (2042, 2068), False, 'import os\n'), ((3284, 3327), 'scipy.sparse.save_npz', 'sparse.save_npz', (['path', 'arr'], {'compressed': '(True)'}), '(path, arr, compressed=True)\n', (3299, 3327), False, 'from scipy import sparse\n'), ((3456, 3477), 'scipy.sparse.load_npz', 'sparse.load_npz', (['path'], {}), '(path)\n', (3471, 3477), False, 'from scipy import sparse\n'), ((708, 753), 'os.path.exists', 'os.path.exists', (["(path + '.data-00000-of-00001')"], {}), "(path + '.data-00000-of-00001')\n", (722, 753), False, 'import os\n'), ((773, 804), 'os.path.exists', 'os.path.exists', (["(path + '.index')"], {}), "(path + '.index')\n", (787, 804), False, 'import os\n'), ((824, 854), 'os.path.exists', 'os.path.exists', (["(path + '.meta')"], {}), "(path + '.meta')\n", (838, 854), False, 'import os\n'), ((1718, 1741), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (1728, 1741), True, 'import tensorflow as tf\n'), ((1762, 1804), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (["(path + '.meta')"], {}), "(path + '.meta')\n", (1788, 1804), True, 'import tensorflow as tf\n'), ((1949, 1987), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.npy"""'], {}), "(self.dirname, 'data.npy')\n", (1961, 1987), False, 'import os\n'), ((2105, 2143), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.npy"""'], {}), "(self.dirname, 'data.npy')\n", (2117, 2143), False, 'import os\n'), ((2946, 2984), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.txt"""'], {}), "(self.dirname, 'data.txt')\n", (2958, 2984), False, 'import os\n'), ((3094, 3132), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.txt"""'], {}), "(self.dirname, 'data.txt')\n", (3106, 3132), False, 'import os\n'), ((3550, 3588), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.npz"""'], {}), "(self.dirname, 'data.npz')\n", (3562, 3588), False, 'import os\n'), ((3692, 3730), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.npz"""'], {}), "(self.dirname, 'data.npz')\n", (3704, 3730), False, 'import os\n'), ((3866, 3887), 'json.dump', 'json.dump', (['obj', 'f_out'], {}), '(obj, f_out)\n', (3875, 3887), False, 'import json\n'), ((4000, 4015), 'json.load', 'json.load', (['f_in'], {}), '(f_in)\n', (4009, 4015), False, 'import json\n'), ((2745, 2774), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.object'}), '([], dtype=np.object)\n', (2753, 2774), True, 'import numpy as np\n'), ((3798, 3837), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.json"""'], {}), "(self.dirname, 
'data.json')\n", (3810, 3837), False, 'import os\n'), ((3926, 3965), 'os.path.join', 'os.path.join', (['self.dirname', '"""data.json"""'], {}), "(self.dirname, 'data.json')\n", (3938, 3965), False, 'import os\n'), ((988, 1036), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {}), '(tf.GraphKeys.GLOBAL_VARIABLES)\n', (1005, 1036), True, 'import tensorflow as tf\n'), ((2351, 2406), 'numpy.vectorize', 'np.vectorize', (["(lambda x: '\\n' not in x)"], {'otypes': '[np.bool]'}), "(lambda x: '\\n' not in x, otypes=[np.bool])\n", (2363, 2406), True, 'import numpy as np\n')]
|
import os, sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import open3d as o3d
os.environ['PYOPENGL_PLATFORM'] = 'osmesa'
# os.environ['PYOPENGL_PLATFORM'] = 'egl'
# os.environ['EGL_DEVICE_ID'] = os.environ['SLURM_STEP_GPUS']
import numpy as np
import time
from tqdm import tqdm
import math
import json
import trimesh
import argparse
import traceback
import pyrender
import glob
import subprocess as sp
import shutil
from scipy.linalg import expm, norm
import multiprocessing as mp
from multiprocessing import Pool
import utils.utils as utils
from NPMs._C import compute_mesh_from_depth as compute_mesh_from_depth_c
from utils.image_proc import backproject_depth
from data_scripts import config_data as cfg
import utils.render_utils as render_utils
from utils.pcd_utils import (BBox,
transform_pointcloud_to_opengl_coords,
rotate_around_axis,
origin, normalize_transformation)
T_opengl_cv = np.array(
[[1.0, 0.0, 0.0, 0.0],
[0.0, -1.0, 0.0, 0.0],
[0.0, 0.0, -1.0, 0.0],
[0.0, 0.0, 0.0, 1.0]]
)
T_opengl_cv_3x3 = np.array(
[[1.0, 0.0, 0.0],
[0.0, -1.0, 0.0 ],
[0.0, 0.0, -1.0 ]]
)
def merge_meshes(vertices_array, faces_array, total_num_vertices, total_num_faces):
merged_vertices = np.zeros((total_num_vertices, 3), dtype=np.float32)
merged_faces = np.zeros((total_num_faces, 3), dtype=np.int32)
offset_vertices = 0
offset_faces = 0
for i in range(len(vertices_array)):
vertices = vertices_array[i]
faces = faces_array[i]
merged_vertices[offset_vertices:offset_vertices+vertices.shape[0]] = vertices
merged_faces[offset_faces:offset_faces+faces.shape[0]] = faces + offset_vertices
offset_vertices += vertices.shape[0]
offset_faces += faces.shape[0]
return merged_vertices, merged_faces
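# Illustrative note (not in the original): when merging two meshes with, say, 3
# and 4 vertices, the second mesh's vertices are appended after the first 3, so
# its face indices are shifted by offset_vertices = 3 (a face [0, 1, 2] becomes
# [3, 4, 5]) to keep them pointing at the correct rows of merged_vertices.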
def transform_mesh(vertices, R, t):
num_vertices = vertices.shape[0]
vertices = np.matmul(
np.repeat(R.reshape(1, 3, 3), num_vertices, axis=0),
vertices.reshape(-1, 3, 1)
) + np.repeat(t.reshape(1, 3, 1), num_vertices, axis=0)
vertices = vertices.reshape(-1, 3)
return vertices
class SimpleRenderer:
def __init__(self, mesh, w, h, fx, fy, cx, cy, znear=0.05, zfar=5.0):
self.w = w
self.h = h
self.fx = fx
self.fy = fy
self.cx = cx
self.cy = cy
self.scene = pyrender.Scene(ambient_light=np.array([1., 1., 1., 1.]))
# Initialize camera setup.
self.camera_setup = render_utils.CameraSetup(camera_setup, None, radius, num_subdivisions_ico)
self.camera = pyrender.IntrinsicsCamera(fx=fx, fy=fy, cx=cx, cy=cy, znear=znear, zfar=zfar)
self.scene.add(self.camera, pose=np.eye(4), name="camera")
        # Most of the time, meshes are given in the OpenGL coordinate system.
        # We convert to the vision (CV) coordinate system first.
mesh = mesh.apply_transform(T_opengl_cv)
# Create pyrender mesh and add it to scene.
self.mesh = pyrender.Mesh.from_trimesh(mesh, smooth=False)
self.scene.add(self.mesh, name="mesh")
# Compute initial camera pose.
mesh_min = mesh.bounds[0]
mesh_max = mesh.bounds[1]
self.mesh_center = (mesh_min + mesh_max) / 2.0
self.mesh_extent = mesh_max - mesh_min
        # Create a simple light setup with 4 point lights placed around the mesh and add them to the scene.
num_lights = 4
for light_id in range(num_lights):
world_to_light = self.compute_world_to_light(light_id, num_lights, radius)
light = pyrender.PointLight(color=[1.0, 1.0, 1.0], intensity=0.7)
nl = pyrender.Node(light=light, matrix=np.linalg.inv(world_to_light))
self.scene.add_node(nl)
def compute_world_to_camera(self, i):
radius_vec = self.camera_setup.get_camera_unit_vector(i)
eye = self.mesh_center - radius_vec
look_dir = self.mesh_center - eye
camera_up = self.camera_setup.get_camera_up(i)
T_world_to_camera = render_utils.compute_view_matrix(eye, look_dir, camera_up)
# The rotation around x-axis needs to be applied for OpenGL CS.
T_world_to_camera = np.matmul(T_opengl_cv, T_world_to_camera)
return T_world_to_camera
def compute_world_to_light(self, i, num_lights, radius):
angle = (1.0 * i / num_lights) * 2 * math.pi
rotation = render_utils.convert_axis_angle_to_rotation_matrix(np.array([0.0, 1.0, 0.0]), angle)
unit_vec = np.matmul(rotation, np.array([0, 0, 1]))
unit_vec /= norm(unit_vec)
radius_vec = radius * unit_vec
eye = self.mesh_center - radius_vec
look_dir = self.mesh_center - eye
camera_up = np.array([0.0, -1.0, 0.0])
world_to_light = render_utils.compute_view_matrix(eye, look_dir, camera_up)
return world_to_light
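    # Worked example (illustrative): for num_lights = 4, successive calls use
    # angles 0, 90, 180 and 270 degrees, so the point lights end up on a ring of
    # the given radius around self.mesh_center (rotation about the y-axis),
    # lighting the mesh from all sides.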
def compute_merged_mesh(self):
vertices_array = []
faces_array = []
total_num_vertices = 0
total_num_faces = 0
for camera_id in range(self.camera_setup.get_num_cameras()):
# print(f"Rendering camera {camera_id} / {self.camera_setup.get_num_cameras()}")
# Compute current camera pose.
T_world_to_camera = self.compute_world_to_camera(camera_id)
# T_camera_to_world = self.compute_camera_to_world(camera_id)
T_camera_to_world = np.linalg.inv(T_world_to_camera)
R_camera_to_world = T_camera_to_world[:3, :3]
t_camera_to_world = T_camera_to_world[:3, 3]
# Adapt the camera pose.
camera_node = list(self.scene.get_nodes(name="camera"))[0]
self.scene.set_pose(camera_node, pose=T_camera_to_world)
###################################################################################
# Render color and depth.
###################################################################################
renderer = pyrender.OffscreenRenderer(viewport_width=self.w, viewport_height=self.h)
_, depth = renderer.render(self.scene)
renderer.delete()
# Backproject points
point_image_cam = backproject_depth(
depth, self.fx, self.fy, self.cx, self.cy, default="NaN", normalizer=1.0
) # (3, h, w)
# Compute mesh from point image.
vertices = np.zeros((1), dtype=np.float32)
faces = np.zeros((1), dtype=np.int32)
compute_mesh_from_depth_c(point_image_cam, max_triangle_dist, vertices, faces)
# Apply extrinsics to the mesh.
vertices = transform_mesh(vertices, T_opengl_cv_3x3, np.zeros_like(t_camera_to_world))
vertices = transform_mesh(vertices, R_camera_to_world, t_camera_to_world)
# Store vertices and faces.
vertices_array.append(vertices)
faces_array.append(faces)
total_num_vertices += vertices.shape[0]
total_num_faces += faces.shape[0]
# Merge meshes into a single mesh.
vertices, faces = merge_meshes(vertices_array, faces_array, total_num_vertices, total_num_faces)
# Visualize mesh.
mesh = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(vertices), o3d.utility.Vector3iVector(faces))
mesh.compute_vertex_normals()
# Transform to OpenGL coordinates
mesh = rotate_around_axis(mesh, axis_name="x", angle=-np.pi)
###################################################################################
# DEBUG
###################################################################################
if False:
o3d.visualization.draw_geometries([mesh])
###################################################################################
return mesh
def render_views(in_path):
try:
        if "SPLITS" in in_path or in_path.endswith("json") or in_path.endswith("txt") or in_path.endswith("npz"):
return
if "a_t_pose" not in in_path:
return
print()
print()
print()
print("Processing", in_path)
print()
out_mesh_path = os.path.join(in_path, "mesh_watertight_poisson.ply")
if not OVERWRITE and os.path.exists(out_mesh_path):
print('Skipping {}'.format(out_mesh_path))
return
# Read mesh
mesh = trimesh.load(
os.path.join(in_path, 'mesh_normalized.off'),
process=False
)
# Prepare scene with mesh
scene = SimpleRenderer(mesh, w=w, h=h, fx=fx, fy=fy, cx=cx, cy=cy, znear=znear, zfar=zfar)
# Render views and compute merged mesh
merged_mesh = scene.compute_merged_mesh()
# Store merged mesh
merged_mesh_path = os.path.join(in_path, "merged_mesh.ply")
o3d.io.write_triangle_mesh(merged_mesh_path, merged_mesh)
# Run Screened Poisson
sp.check_output(
f'meshlabserver -i {merged_mesh_path} -o {out_mesh_path} -s {poisson_exec}',
shell=True
)
os.remove(merged_mesh_path)
print('Finished {}'.format(in_path))
except Exception as err:
print('Error with {}: {}'.format(in_path, traceback.format_exc()))
if __name__ == "__main__":
OVERWRITE = False
parser = argparse.ArgumentParser(
description='Run point cloud sampling'
)
parser.add_argument('-t', '-max_threads', dest='max_threads', type=int, default=-1)
args = parser.parse_args()
workspace_dir = "/home/pablo/workspace/neshpod/"
poisson_exec = os.path.join(workspace_dir, "data_processing/screened_poisson.mlx")
######################################################################################################
# Options
camera_setup = "icosahedron" # "icosahedron" or "octo"
num_subdivisions_ico = 1 # 0: 12 cameras | 1: 62 cameras | 2: 242 cameras
radius = 0.8
max_triangle_dist = 0.005
bb_min = -0.5
bb_max = 0.5
# General params for the camera
fx = 573.353
fy = 576.057
cx = 319.85
cy = 240.632
w = 640
h = 480
znear=0.05
zfar=5.0
######################################################################################################
try:
n_jobs = int(os.environ['SLURM_CPUS_ON_NODE'])
except:
n_jobs = -1
assert args.max_threads != 0
if args.max_threads > 0:
n_jobs = args.max_threads
print()
print(f"Using {n_jobs} ...")
print()
character_names = cfg.identities + cfg.identities_augmented
dataset_type = "datasets_multi"
for character_name in sorted(character_names):
ROOT = f'/cluster/lothlann/ppalafox/{dataset_type}/mixamo/{character_name}'
render_views(os.path.join(ROOT, "a_t_pose", "000000"))
|
[
"os.remove",
"argparse.ArgumentParser",
"open3d.visualization.draw_geometries",
"pyrender.Mesh.from_trimesh",
"os.path.join",
"numpy.zeros_like",
"os.path.dirname",
"os.path.exists",
"utils.image_proc.backproject_depth",
"traceback.format_exc",
"utils.pcd_utils.rotate_around_axis",
"NPMs._C.compute_mesh_from_depth",
"subprocess.check_output",
"utils.render_utils.CameraSetup",
"open3d.io.write_triangle_mesh",
"numpy.linalg.inv",
"open3d.utility.Vector3dVector",
"open3d.utility.Vector3iVector",
"pyrender.PointLight",
"numpy.zeros",
"pyrender.IntrinsicsCamera",
"utils.render_utils.compute_view_matrix",
"numpy.array",
"scipy.linalg.norm",
"numpy.matmul",
"pyrender.OffscreenRenderer",
"numpy.eye"
] |
[((1025, 1130), 'numpy.array', 'np.array', (['[[1.0, 0.0, 0.0, 0.0], [0.0, -1.0, 0.0, 0.0], [0.0, 0.0, -1.0, 0.0], [0.0, \n 0.0, 0.0, 1.0]]'], {}), '([[1.0, 0.0, 0.0, 0.0], [0.0, -1.0, 0.0, 0.0], [0.0, 0.0, -1.0, 0.0\n ], [0.0, 0.0, 0.0, 1.0]])\n', (1033, 1130), True, 'import numpy as np\n'), ((1169, 1232), 'numpy.array', 'np.array', (['[[1.0, 0.0, 0.0], [0.0, -1.0, 0.0], [0.0, 0.0, -1.0]]'], {}), '([[1.0, 0.0, 0.0], [0.0, -1.0, 0.0], [0.0, 0.0, -1.0]])\n', (1177, 1232), True, 'import numpy as np\n'), ((1361, 1412), 'numpy.zeros', 'np.zeros', (['(total_num_vertices, 3)'], {'dtype': 'np.float32'}), '((total_num_vertices, 3), dtype=np.float32)\n', (1369, 1412), True, 'import numpy as np\n'), ((1432, 1478), 'numpy.zeros', 'np.zeros', (['(total_num_faces, 3)'], {'dtype': 'np.int32'}), '((total_num_faces, 3), dtype=np.int32)\n', (1440, 1478), True, 'import numpy as np\n'), ((9513, 9576), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run point cloud sampling"""'}), "(description='Run point cloud sampling')\n", (9536, 9576), False, 'import argparse\n'), ((9785, 9852), 'os.path.join', 'os.path.join', (['workspace_dir', '"""data_processing/screened_poisson.mlx"""'], {}), "(workspace_dir, 'data_processing/screened_poisson.mlx')\n", (9797, 9852), False, 'import os, sys\n'), ((2623, 2697), 'utils.render_utils.CameraSetup', 'render_utils.CameraSetup', (['camera_setup', 'None', 'radius', 'num_subdivisions_ico'], {}), '(camera_setup, None, radius, num_subdivisions_ico)\n', (2647, 2697), True, 'import utils.render_utils as render_utils\n'), ((2721, 2798), 'pyrender.IntrinsicsCamera', 'pyrender.IntrinsicsCamera', ([], {'fx': 'fx', 'fy': 'fy', 'cx': 'cx', 'cy': 'cy', 'znear': 'znear', 'zfar': 'zfar'}), '(fx=fx, fy=fy, cx=cx, cy=cy, znear=znear, zfar=zfar)\n', (2746, 2798), False, 'import pyrender\n'), ((3113, 3159), 'pyrender.Mesh.from_trimesh', 'pyrender.Mesh.from_trimesh', (['mesh'], {'smooth': '(False)'}), '(mesh, smooth=False)\n', (3139, 3159), False, 'import pyrender\n'), ((4137, 4195), 'utils.render_utils.compute_view_matrix', 'render_utils.compute_view_matrix', (['eye', 'look_dir', 'camera_up'], {}), '(eye, look_dir, camera_up)\n', (4169, 4195), True, 'import utils.render_utils as render_utils\n'), ((4297, 4338), 'numpy.matmul', 'np.matmul', (['T_opengl_cv', 'T_world_to_camera'], {}), '(T_opengl_cv, T_world_to_camera)\n', (4306, 4338), True, 'import numpy as np\n'), ((4673, 4687), 'scipy.linalg.norm', 'norm', (['unit_vec'], {}), '(unit_vec)\n', (4677, 4687), False, 'from scipy.linalg import expm, norm\n'), ((4834, 4860), 'numpy.array', 'np.array', (['[0.0, -1.0, 0.0]'], {}), '([0.0, -1.0, 0.0])\n', (4842, 4860), True, 'import numpy as np\n'), ((4887, 4945), 'utils.render_utils.compute_view_matrix', 'render_utils.compute_view_matrix', (['eye', 'look_dir', 'camera_up'], {}), '(eye, look_dir, camera_up)\n', (4919, 4945), True, 'import utils.render_utils as render_utils\n'), ((7530, 7583), 'utils.pcd_utils.rotate_around_axis', 'rotate_around_axis', (['mesh'], {'axis_name': '"""x"""', 'angle': '(-np.pi)'}), "(mesh, axis_name='x', angle=-np.pi)\n", (7548, 7583), False, 'from utils.pcd_utils import BBox, transform_pointcloud_to_opengl_coords, rotate_around_axis, origin, normalize_transformation\n'), ((8335, 8387), 'os.path.join', 'os.path.join', (['in_path', '"""mesh_watertight_poisson.ply"""'], {}), "(in_path, 'mesh_watertight_poisson.ply')\n", (8347, 8387), False, 'import os, sys\n'), ((8965, 9005), 'os.path.join', 'os.path.join', (['in_path', '"""merged_mesh.ply"""'], 
{}), "(in_path, 'merged_mesh.ply')\n", (8977, 9005), False, 'import os, sys\n'), ((9014, 9071), 'open3d.io.write_triangle_mesh', 'o3d.io.write_triangle_mesh', (['merged_mesh_path', 'merged_mesh'], {}), '(merged_mesh_path, merged_mesh)\n', (9040, 9071), True, 'import open3d as o3d\n'), ((9120, 9234), 'subprocess.check_output', 'sp.check_output', (['f"""meshlabserver -i {merged_mesh_path} -o {out_mesh_path} -s {poisson_exec}"""'], {'shell': '(True)'}), "(\n f'meshlabserver -i {merged_mesh_path} -o {out_mesh_path} -s {poisson_exec}'\n , shell=True)\n", (9135, 9234), True, 'import subprocess as sp\n'), ((9268, 9295), 'os.remove', 'os.remove', (['merged_mesh_path'], {}), '(merged_mesh_path)\n', (9277, 9295), False, 'import os, sys\n'), ((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'import os, sys\n'), ((3682, 3739), 'pyrender.PointLight', 'pyrender.PointLight', ([], {'color': '[1.0, 1.0, 1.0]', 'intensity': '(0.7)'}), '(color=[1.0, 1.0, 1.0], intensity=0.7)\n', (3701, 3739), False, 'import pyrender\n'), ((4558, 4583), 'numpy.array', 'np.array', (['[0.0, 1.0, 0.0]'], {}), '([0.0, 1.0, 0.0])\n', (4566, 4583), True, 'import numpy as np\n'), ((4632, 4651), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (4640, 4651), True, 'import numpy as np\n'), ((5514, 5546), 'numpy.linalg.inv', 'np.linalg.inv', (['T_world_to_camera'], {}), '(T_world_to_camera)\n', (5527, 5546), True, 'import numpy as np\n'), ((6094, 6167), 'pyrender.OffscreenRenderer', 'pyrender.OffscreenRenderer', ([], {'viewport_width': 'self.w', 'viewport_height': 'self.h'}), '(viewport_width=self.w, viewport_height=self.h)\n', (6120, 6167), False, 'import pyrender\n'), ((6315, 6410), 'utils.image_proc.backproject_depth', 'backproject_depth', (['depth', 'self.fx', 'self.fy', 'self.cx', 'self.cy'], {'default': '"""NaN"""', 'normalizer': '(1.0)'}), "(depth, self.fx, self.fy, self.cx, self.cy, default='NaN',\n normalizer=1.0)\n", (6332, 6410), False, 'from utils.image_proc import backproject_depth\n'), ((6518, 6547), 'numpy.zeros', 'np.zeros', (['(1)'], {'dtype': 'np.float32'}), '(1, dtype=np.float32)\n', (6526, 6547), True, 'import numpy as np\n'), ((6570, 6597), 'numpy.zeros', 'np.zeros', (['(1)'], {'dtype': 'np.int32'}), '(1, dtype=np.int32)\n', (6578, 6597), True, 'import numpy as np\n'), ((6613, 6691), 'NPMs._C.compute_mesh_from_depth', 'compute_mesh_from_depth_c', (['point_image_cam', 'max_triangle_dist', 'vertices', 'faces'], {}), '(point_image_cam, max_triangle_dist, vertices, faces)\n', (6638, 6691), True, 'from NPMs._C import compute_mesh_from_depth as compute_mesh_from_depth_c\n'), ((7361, 7397), 'open3d.utility.Vector3dVector', 'o3d.utility.Vector3dVector', (['vertices'], {}), '(vertices)\n', (7387, 7397), True, 'import open3d as o3d\n'), ((7399, 7432), 'open3d.utility.Vector3iVector', 'o3d.utility.Vector3iVector', (['faces'], {}), '(faces)\n', (7425, 7432), True, 'import open3d as o3d\n'), ((7816, 7857), 'open3d.visualization.draw_geometries', 'o3d.visualization.draw_geometries', (['[mesh]'], {}), '([mesh])\n', (7849, 7857), True, 'import open3d as o3d\n'), ((8418, 8447), 'os.path.exists', 'os.path.exists', (['out_mesh_path'], {}), '(out_mesh_path)\n', (8432, 8447), False, 'import os, sys\n'), ((8585, 8629), 'os.path.join', 'os.path.join', (['in_path', '"""mesh_normalized.off"""'], {}), "(in_path, 'mesh_normalized.off')\n", (8597, 8629), False, 'import os, sys\n'), ((10971, 11011), 'os.path.join', 'os.path.join', (['ROOT', '"""a_t_pose"""', '"""000000"""'], 
{}), "(ROOT, 'a_t_pose', '000000')\n", (10983, 11011), False, 'import os, sys\n'), ((2531, 2561), 'numpy.array', 'np.array', (['[1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0])\n', (2539, 2561), True, 'import numpy as np\n'), ((2841, 2850), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (2847, 2850), True, 'import numpy as np\n'), ((6802, 6834), 'numpy.zeros_like', 'np.zeros_like', (['t_camera_to_world'], {}), '(t_camera_to_world)\n', (6815, 6834), True, 'import numpy as np\n'), ((3791, 3820), 'numpy.linalg.inv', 'np.linalg.inv', (['world_to_light'], {}), '(world_to_light)\n', (3804, 3820), True, 'import numpy as np\n'), ((9422, 9444), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (9442, 9444), False, 'import traceback\n')]
|
import argparse
import tensorflow as tf
import numpy as np
from tfbldr.datasets import fetch_mnist
from collections import namedtuple
import sys
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import copy
parser = argparse.ArgumentParser()
parser.add_argument('direct_model', nargs=1, default=None)
parser.add_argument('--model', dest='model_path', type=str, default=None)
parser.add_argument('--seed', dest='seed', type=int, default=1999)
args = parser.parse_args()
if args.model_path is None:
    if args.direct_model is None:
raise ValueError("Must pass first positional argument as model, or --model argument, e.g. summary/experiment-0/models/model-7")
else:
model_path = args.direct_model[0]
else:
model_path = args.model_path
random_state = np.random.RandomState(args.seed)
config = tf.ConfigProto(
device_count={'GPU': 0}
)
d = np.load("music_data_jos.npz")
image_data = d["measures"]
idx = np.concatenate((np.arange(len(image_data))[:, None], np.arange(len(image_data))[:, None], np.arange(len(image_data))[:, None]), axis=1)
image_data = np.concatenate((image_data[..., 0][..., None],
image_data[..., 1][..., None],
image_data[..., 2][..., None]), axis=0)
idx = np.concatenate((idx[:, 0], idx[:, 1], idx[:, 2]), axis=0)
which_voice = np.zeros_like(idx)
which_voice[::3] = 0
which_voice[1::3] = 1
which_voice[2::3] = 2
shuffle_random = np.random.RandomState(112)
ii = np.arange(len(image_data))
shuffle_random.shuffle(ii)
image_data = image_data[ii]
idx = idx[ii]
which_voice = which_voice[ii]
bs = 50
train_image_data = image_data[:-5000]
et = len(train_image_data) - len(train_image_data) % bs
train_image_data = train_image_data[:et]
train_idx = idx[:-5000]
train_idx = train_idx[:et]
train_which_voice = which_voice[:-5000]
train_which_voice = train_which_voice[:et]
valid_image_data = image_data[-5000:]
ev = len(valid_image_data) - len(valid_image_data) % bs
valid_image_data = valid_image_data[:ev]
valid_idx = idx[-5000:]
valid_idx = valid_idx[:ev]
valid_which_voice = which_voice[-5000:]
valid_which_voice = valid_which_voice[:ev]
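# Note (added for clarity): both splits are trimmed so their length is an exact
# multiple of bs=50, so every minibatch in the loops below is full; the asserts
# inside the session block rely on this.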
with tf.Session(config=config) as sess:
saver = tf.train.import_meta_graph(model_path + '.meta')
saver.restore(sess, model_path)
fields = ['images',
'bn_flag',
'z_e_x',
'z_q_x',
'z_i_x',
'x_tilde']
vs = namedtuple('Params', fields)(
*[tf.get_collection(name)[0] for name in fields]
)
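    # The restored TF1 graph is expected to expose these tensors through named
    # collections (presumably registered with tf.add_to_collection at training
    # time); the namedtuple only provides attribute-style access, e.g. vs.images.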
assert len(train_image_data) % bs == 0
assert len(valid_image_data) % bs == 0
train_z_i = []
for i in range(len(train_image_data) // bs):
print("Train minibatch {}".format(i))
x = train_image_data[i * bs:(i + 1) * bs]
feed = {vs.images: x,
vs.bn_flag: 1.}
outs = [vs.z_e_x, vs.z_q_x, vs.z_i_x, vs.x_tilde]
r = sess.run(outs, feed_dict=feed)
x_rec = r[-1]
z_i = r[-2]
train_z_i += [zz[:, :, None] for zz in z_i]
train_z_i = np.array(train_z_i)
valid_z_i = []
for i in range(len(valid_image_data) // bs):
print("Valid minibatch {}".format(i))
x = valid_image_data[i * bs:(i + 1) * bs]
feed = {vs.images: x,
vs.bn_flag: 1.}
outs = [vs.z_e_x, vs.z_q_x, vs.z_i_x, vs.x_tilde]
r = sess.run(outs, feed_dict=feed)
x_rec = r[-1]
z_i = r[-2]
valid_z_i += [zz[:, :, None] for zz in z_i]
valid_z_i = np.array(valid_z_i)
train_conditions = []
twv = np.array(copy.deepcopy(train_which_voice))
tri = np.array(copy.deepcopy(train_idx))
ce = np.array(copy.deepcopy(d['centers']))
left_lu = tri - 1
left_lu[left_lu < 0] = 0
li = ce[left_lu]
left = np.array([lli[ttwv] for ttwv, lli in zip(twv, li)])
right_lu = tri + 1
right_lu[right_lu > max(tri)] = max(tri)
ri = ce[right_lu]
right = np.array([rri[ttwv] for ttwv, rri in zip(twv, ri)])
mid_lu = tri
mi = ce[mid_lu]
mid = np.array([mmi[ttwv] for ttwv, mmi in zip(twv, mi)])
train_conditions = list(zip(mid - left, mid - right))
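    # Condition construction (added for clarity): for each measure the condition
    # is the pair (centre minus left-neighbour centre, centre minus
    # right-neighbour centre) of the active voice; the clamping of
    # left_lu/right_lu above reuses the first/last measure at the sequence
    # boundaries instead of wrapping around.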
vwv = np.array(copy.deepcopy(valid_which_voice))
vri = np.array(copy.deepcopy(valid_idx))
left_lu = vri - 1
left_lu[left_lu < 0] = 0
li = ce[left_lu]
left = np.array([lli[vvwv] for vvwv, lli in zip(vwv, li)])
right_lu = vri + 1
right_lu[right_lu > max(vri)] = max(vri)
ri = ce[right_lu]
right = np.array([rri[vvwv] for vvwv, rri in zip(vwv, ri)])
mid_lu = vri
mi = ce[mid_lu]
mid = np.array([mmi[vvwv] for vvwv, mmi in zip(vwv, mi)])
valid_conditions = list(zip(mid - left, mid - right))
mapper_values = sorted(list(set(train_conditions)) + [(None, None)])
condition_lookup = {v: k for k, v in enumerate(mapper_values)}
def mapper(c):
return np.array([condition_lookup[ci] if ci in condition_lookup else condition_lookup[(None, None)] for ci in c])[:, None]
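    # Any (left, right) pair not present in the training conditions falls back
    # to the shared (None, None) bucket, so validation conditions always map to
    # a valid label index.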
train_image_data = train_z_i
val_image_data = valid_z_i
train_labels = mapper(train_conditions)
valid_labels = mapper(valid_conditions)
np.savez("vq_vae_encoded_music.npz", train_z_i=train_z_i, valid_z_i=valid_z_i, train_conditions=train_conditions, valid_conditions=valid_conditions, train_labels=train_labels, valid_labels=valid_labels, mapper_values=mapper_values)
|
[
"numpy.load",
"numpy.zeros_like",
"copy.deepcopy",
"argparse.ArgumentParser",
"tensorflow.train.import_meta_graph",
"tensorflow.get_collection",
"tensorflow.Session",
"numpy.random.RandomState",
"tensorflow.ConfigProto",
"matplotlib.use",
"numpy.array",
"collections.namedtuple",
"numpy.savez",
"numpy.concatenate"
] |
[((163, 184), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (177, 184), False, 'import matplotlib\n'), ((240, 265), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (263, 265), False, 'import argparse\n'), ((798, 830), 'numpy.random.RandomState', 'np.random.RandomState', (['args.seed'], {}), '(args.seed)\n', (819, 830), True, 'import numpy as np\n'), ((841, 880), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'device_count': "{'GPU': 0}"}), "(device_count={'GPU': 0})\n", (855, 880), True, 'import tensorflow as tf\n'), ((892, 921), 'numpy.load', 'np.load', (['"""music_data_jos.npz"""'], {}), "('music_data_jos.npz')\n", (899, 921), True, 'import numpy as np\n'), ((1104, 1226), 'numpy.concatenate', 'np.concatenate', (['(image_data[..., 0][..., None], image_data[..., 1][..., None], image_data[\n ..., 2][..., None])'], {'axis': '(0)'}), '((image_data[..., 0][..., None], image_data[..., 1][..., None\n ], image_data[..., 2][..., None]), axis=0)\n', (1118, 1226), True, 'import numpy as np\n'), ((1286, 1343), 'numpy.concatenate', 'np.concatenate', (['(idx[:, 0], idx[:, 1], idx[:, 2])'], {'axis': '(0)'}), '((idx[:, 0], idx[:, 1], idx[:, 2]), axis=0)\n', (1300, 1343), True, 'import numpy as np\n'), ((1358, 1376), 'numpy.zeros_like', 'np.zeros_like', (['idx'], {}), '(idx)\n', (1371, 1376), True, 'import numpy as np\n'), ((1459, 1485), 'numpy.random.RandomState', 'np.random.RandomState', (['(112)'], {}), '(112)\n', (1480, 1485), True, 'import numpy as np\n'), ((5016, 5261), 'numpy.savez', 'np.savez', (['"""vq_vae_encoded_music.npz"""'], {'train_z_i': 'train_z_i', 'valid_z_i': 'valid_z_i', 'train_conditions': 'train_conditions', 'valid_conditions': 'valid_conditions', 'train_labels': 'train_labels', 'valid_labels': 'valid_labels', 'mapper_values': 'mapper_values'}), "('vq_vae_encoded_music.npz', train_z_i=train_z_i, valid_z_i=\n valid_z_i, train_conditions=train_conditions, valid_conditions=\n valid_conditions, train_labels=train_labels, valid_labels=valid_labels,\n mapper_values=mapper_values)\n", (5024, 5261), True, 'import numpy as np\n'), ((2172, 2197), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (2182, 2197), True, 'import tensorflow as tf\n'), ((2219, 2267), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (["(model_path + '.meta')"], {}), "(model_path + '.meta')\n", (2245, 2267), True, 'import tensorflow as tf\n'), ((3073, 3092), 'numpy.array', 'np.array', (['train_z_i'], {}), '(train_z_i)\n', (3081, 3092), True, 'import numpy as np\n'), ((3532, 3551), 'numpy.array', 'np.array', (['valid_z_i'], {}), '(valid_z_i)\n', (3540, 3551), True, 'import numpy as np\n'), ((3590, 3622), 'copy.deepcopy', 'copy.deepcopy', (['train_which_voice'], {}), '(train_which_voice)\n', (3603, 3622), False, 'import copy\n'), ((3639, 3663), 'copy.deepcopy', 'copy.deepcopy', (['train_idx'], {}), '(train_idx)\n', (3652, 3663), False, 'import copy\n'), ((3679, 3706), 'copy.deepcopy', 'copy.deepcopy', (["d['centers']"], {}), "(d['centers'])\n", (3692, 3706), False, 'import copy\n'), ((4126, 4158), 'copy.deepcopy', 'copy.deepcopy', (['valid_which_voice'], {}), '(valid_which_voice)\n', (4139, 4158), False, 'import copy\n'), ((4175, 4199), 'copy.deepcopy', 'copy.deepcopy', (['valid_idx'], {}), '(valid_idx)\n', (4188, 4199), False, 'import copy\n'), ((2456, 2484), 'collections.namedtuple', 'namedtuple', (['"""Params"""', 'fields'], {}), "('Params', fields)\n", (2466, 2484), False, 'from collections import namedtuple\n'), 
((4762, 4872), 'numpy.array', 'np.array', (['[(condition_lookup[ci] if ci in condition_lookup else condition_lookup[None,\n None]) for ci in c]'], {}), '([(condition_lookup[ci] if ci in condition_lookup else\n condition_lookup[None, None]) for ci in c])\n', (4770, 4872), True, 'import numpy as np\n'), ((2496, 2519), 'tensorflow.get_collection', 'tf.get_collection', (['name'], {}), '(name)\n', (2513, 2519), True, 'import tensorflow as tf\n')]
|
'''
Hyperspectral Instrument Data Resemblance Algorithm - A simple modular Python simulator for low-resolution spectroscopy
Made by <NAME> as part of the thesis work.
2020
'''
import numpy as np
import matplotlib.pyplot as plt
import input_file as inp
import HIDRA
import scipy.signal
import random
def rms(x):
return np.sqrt(np.mean(x**2))
def int_r(r1, r2, rang):
from scipy.interpolate import interp1d
x= np.arange(len(r1))
xnew = np.arange(0, len(r1), 0.001)
f1 = interp1d(x, r1, kind=3, fill_value="extrapolate")
f2 = interp1d(x, r2, kind=3, fill_value="extrapolate")
r1_int = f1(xnew)
r2_int = f2(xnew)
return r1_int, r2_int
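# Note (added for clarity): int_r resamples both read-outs onto a grid 1000x
# finer (step 0.001) using cubic interpolation (kind=3), so the two spectra can
# later be aligned by cross-correlation with sub-pixel precision (see the
# archived the_thing() helper further below). The `rang` argument is currently
# unused.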
#### SETUP PHASE ####
# def sinusoidal(size, frequency, amplitude, phase):
# x = np.zeros((size))
# y = np.zeros((size))
# # omega0 = 1
# # deltax = np.random.random()*np.pi #Random phases
# # deltay = np.random.random()*np.pi
# # ax = 1
# # ay = 1
# for i in range(0,size):
# x[i] = amplitude * np.cos(frequency * i - phase) #next x-value, using the new phase
# x[i] = noise_test(x[i])
# y[i] = amplitude * np.sin(frequency * i - phase) #and new y-value of coord.
# y[i] = noise_test(y[i])
from datetime import datetime
startTime = datetime.now()
f = open("out.txt", "a")
# del x_j, y_j
# New jitter files:
# for i in range(1, 21):
# x_j, y_j = simfun.func_jitter(entries=(exp*inp.step), gain=0.15, dt=5)
# j = np.vstack([x_j, y_j])
# np.save("runs/jitter/j"+str(i), j)
def main(N):
spec_eff, spec_eff2, jitter, x_j, y_j, img_size, sub_pixel, pl_arc_mm, disper, mask, slitpos, background = HIDRA.setup(inp)
# x, y = HIDRA.sinusoidal(1000, frequency=100, amplitude=10, phase=1)
# test = HIDRA.jitter_im(x,y, (101,101))
# plt.imshow(test)
# plt.plot(x,y)
# jitter
freq = [random.randrange(0, 5, 1)/1000, random.randrange(0, 10, 1)/1000, random.randrange(0, 100, 1)/1000, random.randrange(0, 1000, 1)/1000, random.randrange(0, 10000, 1)/1000]
ampl = [random.randrange(0, 1, 1)/1000, random.randrange(0, 10, 1)/1000, random.randrange(0, 100, 1)/1000, random.randrange(0, 1000, 1)/1000, random.randrange(0, 10000, 1)/1000]
x_jit = HIDRA.sinusoidal(size=inp.exp*inp.step, frequency=freq, amplitude= ampl, phase=1)
freq = [random.randrange(0, 5, 1)/1000, random.randrange(0, 10, 1)/1000, random.randrange(0, 100, 1)/1000, random.randrange(0, 1000, 1)/1000, random.randrange(0, 10000, 1)/1000]
ampl = [random.randrange(0, 1, 1)/1000, random.randrange(0, 10, 1)/1000, random.randrange(0, 100, 1)/1000, random.randrange(0, 1000, 1)/1000, random.randrange(0, 10000, 1)/1000]
y_jit = HIDRA.sinusoidal(size=inp.exp*inp.step, frequency=freq, amplitude= ampl, phase=1)
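    # Jitter model (added for clarity): each axis is built from five sinusoids
    # whose frequencies and amplitudes are drawn at random over several orders
    # of magnitude (roughly 0 to 10 after the /1000 scaling), which
    # HIDRA.sinusoidal presumably combines into a composite pointing-jitter
    # signal of length inp.exp*inp.step.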
# jitter = HIDRA.jitter_im(x_jit, y_jit, (101, 101))
# for i in range(10):
# x_temp, y_temp = HIDRA.sinusoidal(size=inp.exp*inp.step, (10, 10), amplitude=(10, 10), phase=(-1,1))
# x_jit = x_jit + x_temp
# y_jit = y_jit + y_temp
# x_jit, y_jit = HIDRA.sinusoidal(size=inp.exp*inp.step, (1, 1), amplitude=(10, 10), phase=(-1,1))
# x_temp, y_temp = HIDRA.sinusoidal(size=inp.exp*inp.step, (10, 10), amplitude=(10, 10), phase=(-1,1))
# print(x_jit)
# jitter = HIDRA.jitter_im(x_jit, y_jit, (101, 101))
# in_spec = np.loadtxt(inp.in_spec)
# in_spec2 = np.loadtxt(inp.in_spec2)
wl_ran = inp.wl_ran
# exp = inp.exp
# slit = inp.slit
CCD = np.load(inp.in_CCD)
#### SETUP PHASE COMPLETE ####
#### IMAGE FORMATION BEGINS ####
image1, image_wl1=HIDRA.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15, 45], pos=slitpos, image_size=img_size,
dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
spectrum1 = HIDRA.prep_func(image1, CCD, sub_pixel, wl_ran)
# startTime2 = datetime.now()
for j in spectrum1:
f.write("{:e} , ".format(j))
f.write("\n")
# print(datetime.now() - startTime2)
return spectrum1, image1, image_wl1
spectrum1, image1, image_wl1 = main(1)
spec_eff, spec_eff2, jitter, x_j, y_j, img_size, sub_pixel, pl_arc_mm, disper, mask, slitpos, background = HIDRA.setup(inp)
wl_ran = inp.wl_ran
CCD = np.load(inp.in_CCD)
image2, image_wl2 = HIDRA.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15, 45], pos=slitpos, image_size=img_size,
dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
spectrum2 = HIDRA.prep_func(image2, CCD, sub_pixel, wl_ran)
ro, ri, wave, delta = HIDRA.transmission_spec_func(spectrum1, spectrum2, wl_ran, disper, slitpos, img_size)
# (image=ro, image2=ri, sub_pixel=sub_pixel, wl_ran=inp.wl_ran, disper=disper,
# slitpos=slitpos, img_size=img_size, move="y", noiseinp="n")
plt.plot(wave, (ro-ri)/ro)
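# Note (added for clarity): (ro - ri) / ro is the normalised transit depth, i.e.
# the relative flux drop of the in-transit spectrum (ri) with respect to the
# out-of-transit spectrum (ro) at each calibrated wavelength in `wave`.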
f.close()
fd=open("out.txt","r")
d=fd.read()
fd.close()
m=d.split("\n")
s="\n".join(m[:-1])
fd=open("out.txt","w+")
for i in range(len(s)):
fd.write(s[i])
fd.close()
f = open("out.txt", "a")
def get_size(fileobject):
fileobject.seek(0,2) # move the cursor to the end of the file
size = fileobject.tell()
return size
#and then
fsize = get_size(f)
f.truncate(fsize - 1)
f.close()
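# Note (added for clarity): the block above first rewrites out.txt without its
# trailing (empty) line and then truncates one more character, stripping the
# trailing separator left by the final '{:e} , ' write before the next append.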
print(datetime.now() - startTime)
# print(spectrum1)
# for i in range(1):
# r = spectrum1
# for j in r:
# f.write("{:e} , ".format(j))
# f.write("\n")
# f.close()
'''
image2, image_wl2=HIDRA.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15, 45], pos=slitpos, image_size=img_size,
dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
ro = image1
ri = image2
no = HIDRA.noise(size=ro.shape, image=ro)
ni = HIDRA.noise(size=ri.shape, image=ri)
ri = ri+ni
ro = ro+no
del no, ni, image1, image2
ro, ri, wave, delta = HIDRA.the_thing(image=ro, image2=ri, sub_pixel=sub_pixel, wl_ran=inp.wl_ran, disper=disper,
slitpos=slitpos, img_size=img_size, move="y", noiseinp="n")
with open('ri.txt','w') as f:
np.savetxt(f, [ri], delimiter=',')
with open('ro.txt','w') as f:
np.savetxt(f, [ro], delimiter=',')
del ri, ro, wave, delta
'''
# for i in range(10):
# x_j, y_j = simfun.func_jitter(entries=(exp*inp.step), gain=0.15, dt=5)
# jitter = np.vstack([x_j, y_j])
# spec_eff, spec_eff2, jitter, x_j, y_j, psf, img_size, sub_pixel, pl_arc_mm, disper, mask, slitpos, background = simfun.setup(inp)
# in_spec = np.loadtxt(inp.in_spec)
# in_spec2 = np.loadtxt(inp.in_spec2)
# wl_ran = inp.wl_ran
# exp = inp.exp
# slit = inp.slit
# CCD = np.load(inp.in_CCD)
# image1, image_wl1=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15, 45], pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# image2, image_wl2=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15, 45], pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# ro = image1
# ri = image2
# no = simfun.noise(size=ro.shape, image=ro)
# ni = simfun.noise(size=ri.shape, image=ri)
# ri = ri+ni
# ro = ro+no
# del no, ni, image1, image2
# ro, ri, wave, delta = the_thing(ro, ri)
# with open('ri.txt','a') as f:
# np.savetxt(f, [ri], delimiter=',')
# with open('ro.txt','a') as f:
# np.savetxt(f, [ro], delimiter=',')
# test=np.loadtxt("test.txt", delimiter=",")
# plt.figure()
# plt.plot(wave, ro/ri)
# plt.plot(in_spec[:,0], in_spec[:,1]/in_spec2[:,1])
# # plt.fill_between(wave, ((ro_mean-ro_std)/(ri_mean-ri_std)), ((ro_mean+ro_std)/(ri_mean+ri_std)), alpha=0.2)
# plt.legend(["Output", "Input", "1 $\sigma$"], fontsize=14)
# plt.tick_params(labelsize=12)
# plt.xlabel("Wavelength [$nm$]", size=14)
# plt.ylabel("Ratio", size=14)
# plt.grid()
# ======================================================================================================================
# slit = ['pix', 2, 3.5]
# slit_size = simfun.convert_slit(unit = slit[0], size = slit[1:3], convert_factor = pl_arc_mm) #Convert slit size to pixels
# slit_size[0] = slit_size[0]*sub_pixel #Convert to subpixels
# slit_size[1] = slit_size[1]*sub_pixel
# slitpos = [150, 249] #Slit position on the sub-pixel CCD image. Arbitrary position..
# mask = simfun.func_slit(slit_size = np.floor(slit_size).astype(int), pos=slitpos, image_size=img_size)
# mask = np.ones((img_size))
# for i in range(1, 11):
# jitter = np.load("jitter/j"+str(i)+".npy")
# jitter = simfun.jitter_im(x= jitter[0,:], y= jitter[1,:], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# jitter2 = np.load("jitter/j"+str(i+10)+".npy")
# jitter2 = simfun.jitter_im(x= jitter2[0,:], y= jitter2[1,:], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# image1, image1_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter2, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# #np.save("e600/slit"+str(slit[1])+"/r"+str(i)+"/out", image1)
# np.save("e300_bigpsf_noslit/r"+str(i)+"/out", image1)
# np.save("e300_bigpsf_noslit/r"+str(i)+"/in", image2)
# # ======================================================================================================================
# slit = ['pix', 2, 3.5]
# slit_size = simfun.convert_slit(unit = slit[0], size = slit[1:3], convert_factor = pl_arc_mm) #Convert slit size to pixels
# slit_size[0] = slit_size[0]*sub_pixel #Convert to subpixels
# slit_size[1] = slit_size[1]*sub_pixel
# slitpos = [150, 249] #Slit position on the sub-pixel CCD image. Arbitrary position..
# mask = simfun.func_slit(slit_size = np.floor(slit_size).astype(int), pos=slitpos, image_size=img_size)
# for i in range(1, 11):
# jitter = np.load("jitter/j"+str(i)+".npy")
# jitter = simfun.jitter_im(x= jitter[:,0], y= jitter[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# jitter2 = np.load("jitter/j"+str(i+10)+".npy")
# jitter2 = simfun.jitter_im(x= jitter2[:,0], y= jitter2[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# image1, image1_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter2, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# np.save("1200/slit"+str(slit[1])+"/r"+str(i)+"/out", image1)
# np.save("1200/slit"+str(slit[1])+"/r"+str(i)+"/in", image2)
# # ======================================================================================================================
# slit = ['pix', 4, 3.5]
# slit_size = simfun.convert_slit(unit = slit[0], size = slit[1:3], convert_factor = pl_arc_mm) #Convert slit size to pixels
# slit_size[0] = slit_size[0]*sub_pixel #Convert to subpixels
# slit_size[1] = slit_size[1]*sub_pixel
# slitpos = [150, 249] #Slit position on the sub-pixel CCD image. Arbitrary position..
# mask = simfun.func_slit(slit_size = np.floor(slit_size).astype(int), pos=slitpos, image_size=img_size)
# for i in range(1, 11):
# jitter = np.load("runs/jitter/j"+str(i)+".npy")
# jitter = simfun.jitter_im(x= jitter[:,0], y= jitter[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# jitter2 = np.load("runs/jitter/j"+str(i+10)+".npy")
# jitter2 = simfun.jitter_im(x= jitter2[:,0], y= jitter2[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# image1, image1_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter2, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# np.save("runs/slit"+str(slit[1])+"/r"+str(i)+"/out", image1)
# np.save("runs/slit"+str(slit[1])+"/r"+str(i)+"/in", image2)
# # ======================================================================================================================
# slit = ['pix', 12, 12]
# slit_size = simfun.convert_slit(unit = slit[0], size = slit[1:3], convert_factor = pl_arc_mm) #Convert slit size to pixels
# slit_size[0] = slit_size[0]*sub_pixel #Convert to subpixels
# slit_size[1] = slit_size[1]*sub_pixel
# slitpos = [150, 249] #Slit position on the sub-pixel CCD image. Arbitrary position..
# mask = simfun.func_slit(slit_size = np.floor(slit_size).astype(int), pos=slitpos, image_size=img_size)
# for i in range(1, 11):
# jitter = np.load("runs/jitter/j"+str(i)+".npy")
# jitter = simfun.jitter_im(x= jitter[:,0], y= jitter[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# jitter2 = np.load("runs/jitter/j"+str(i+10)+".npy")
# jitter2 = simfun.jitter_im(x= jitter2[:,0], y= jitter2[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
# image1, image1_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter2, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# np.save("runs/slit"+str(slit[1])+"/r"+str(i)+"/out", image1)
# np.save("runs/slit"+str(slit[1])+"/r"+str(i)+"/in", image2)
'''
check = input("Run simulation? y/n: ")
if check == "y":
image, image_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
dispersion=disper, eff=spec_eff, mask_img=mask, steps=1, plot='n')
# lam = (image_wl+np.mean([wl_ran[0], wl_ran[1]])*10**(-80))/(image+10**(-80)) #To find the dispersion wavelength (for calibrating later)
# summed = simfun.bin_sum(image, sub_pixel)
# r1 = simfun.read_out(summed)
number = round(rms(y_j*(pl_arc_mm/sub_pixel)), 3)
if "." in str(number):
jin = str(number).split(".")[0]
jid = str(number).split(".")[-1]
number = round(slit[1]*(pl_arc_mm), 3)
if "." in str(number):
sn = str(number).split(".")[0]
sd = str(number).split(".")[-1]
name = 'out'+'-e'+str(exp)+'-j'+jin+'_'+jid+'-s'+sn+'_'+sd
    np.save('runs/'+name, image)
np.save('runs/'+name+'_wl', image_wl)
#New jitter, and redo simulation, now with spec_eff2
x_j2, y_j2 = simfun.func_jitter(entries=(exp*inp.step), gain=0.15, dt=5)
# x_j2, y_j2 = simfun.jitter(entries=(exp*step), gain=0.02, dt=10)
jitter2 = np.stack((x_j2, y_j2), axis=-1)
jitter2 = simfun.jitter_im(x= jitter2[:,0], y= jitter2[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]) )
image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter2, psf_img=psf, pos=slitpos, image_size=img_size,
dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
number = round(rms(y_j2*(pl_arc_mm/sub_pixel)), 3)
if "." in str(number):
jin = str(number).split(".")[0]
jid = str(number).split(".")[-1]
number = round(slit[1]*(pl_arc_mm), 3)
if "." in str(number):
sn = str(number).split(".")[0]
sd = str(number).split(".")[-1]
name2 = 'in'+'-e'+str(exp)+'-j'+jin+'_'+jid+'-s'+sn+'_'+sd
np.save('runs/'+name2, image2)
np.save('runs/'+name2+'_wl', image2_wl)
'''
# control_in = np.load('runs/in-e300-j1_357-s7_054.npy')
# sumcin = simfun.bin_sum(control_in, 10)
# rcin = simfun.read_out(sumcin)
# control_out = np.load('runs/out-e300-j1_372-s7_054.npy')
# sumcout = simfun.bin_sum(control_out, 10)
# rcout = simfun.read_out(sumcout)
# jit_out = np.load('runs/out-e300-j2_407-s7_054.npy')
# sumjitout = simfun.bin_sum(jit_out, 10)
# rjitout = simfun.read_out(sumjitout)
# jit_in = np.load('runs/in-e300-j2_853-s7_054.npy')
# sumjitin = simfun.bin_sum(jit_in, 10)
# rjitin = simfun.read_out(sumjitin)
# rcout = rjitout
# rcin = rjitin
# from scipy.signal import find_peaks
# peak_ref, _ = find_peaks(in_spec[:,1]*(-1), threshold=4, prominence=200, distance=3)
# # peaks, _ = find_peaks(-1*rcout, threshold=2.6e5, distance=3, prominence=500000)
# peaks, _ = find_peaks(-1*rcout, threshold=1.65e5, distance=1, prominence=500000)
# peaks = peaks[:-1]
# peak_ref = peak_ref[1:]
# from scipy.stats import linregress #Function to find a and b for a linear dispersion function. This of course only works as the dispersion is linear!
# a, b, r, p, s = linregress(peaks, in_spec[:,0][peak_ref])
# wavelength = a*np.arange(img_size[1]/10)+(b) #Making the new wavelength array for the readout values.
# del a, b, r, p, s
# wave = wavelength[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# r1 = rcout[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# r2 = rcin[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# bin_width=100
# cr1 = [sum(r1[i:i+bin_width]) for i in range(50, len(r1), bin_width)]
# cr2 = [sum(r2[i:i+bin_width]) for i in range(50, len(r2), bin_width)]
# foo = np.zeros((int(r1.shape[0]/bin_width), 9))
# for i in range(7):
# a = cr1[i]
# foo[i] = np.array([cr1[i]/a, cr1[i]/a, cr1[i]/a, cr2[i]/a, cr2[i]/a, cr2[i]/a, cr1[i]/a, cr1[i]/a, cr1[i]/a ])
# color = ['purple', 'blue', 'cyan', 'green', 'yellow', 'orange', 'red']
# for i in range(7): plt.plot(foo[i,:], '.', markersize=13, color=color[i])
# for i in range(7): plt.plot(foo[i,:], '-', color=color[i])
# plt.xlabel("Time, arbitrary units", size=14)
# plt.ylabel("Relative luminosity", size=14)
# plt.tick_params(labelsize=14)
# plt.grid()
def noise1d(x, RON=5):
noise = np.zeros((x.shape))
for i in range(x.shape[0]):
for j in range(x.shape[1]):
noise[i,j] = (np.sqrt(x[i,j])+RON)*np.random.normal(0,1)
return noise
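# Illustrative sketch (assumption, not used by the original script): the same
# noise model, a sqrt(signal) shot-noise term plus read-out noise RON scaled by
# a standard-normal draw per pixel, can be generated without Python loops.
def noise1d_vectorized(x, RON=5):
    # one N(0, 1) sample per pixel, scaled by sqrt(counts) + read-out noise
    return (np.sqrt(x) + RON) * np.random.normal(0, 1, size=x.shape)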
'''
def the_thing(image, image2, sub_pixel=sub_pixel, wl_ran=wl_ran, disper=disper, slitpos=slitpos, move="y", noiseinp="n"):
rout = simfun.read_out(simfun.bin_sum(image, sub_pixel))
rin = simfun.read_out(simfun.bin_sum(image2, sub_pixel)) #sum image and readout
r1, r2 = int_r(rout, rin, wl_ran) #Interpolate to higher resolution
print("\nBinning and read-out done")
if move == "y":
autocor = scipy.signal.correlate(r1, r1, mode="same") #perform autocorr.
cor = scipy.signal.correlate(r1, r2, mode="same") #Regular correlation
first = np.argmax(autocor)
second = np.argmax(cor)
delta = first-second #amount of sub-pixels to move r1, for the two spectra to overlap
r1 = np.roll(r1, delta) #Move r1
del first, second, autocor, cor
if not move == "y":
delta = 0
# noiseinp = input("Include photon noise? y/n: ")
# noiseinp = "n"
if noiseinp == "y":
rs= np.random.RandomState()
no = rs.poisson(np.mean(image), size=(int(image.shape[0] /sub_pixel), 700))
ni = rs.poisson(np.mean(image2), size=(int(image2.shape[0]/sub_pixel), 700))
no = simfun.read_out(no)
ni = simfun.read_out(ni)
# r1 = scipy.ndimage.filters.uniform_filter1d(r1, size=5000)
# r2 = scipy.ndimage.filters.uniform_filter1d(r2, size=5000)
pos = (disper[0]+slitpos[0])*100.0 #Position of each wavelength on the detector
from scipy.stats import linregress
a, b, r, p, s = linregress(pos, np.arange(wl_ran[0], wl_ran[1])) #Linear regression to find the lambda/pixel correlation
wavelength = a*np.arange(img_size[1]*100.0)+(b)
del a, b, r, p, s,
wave = wavelength[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1] #remove outlying entries, where the spectrum is not present (belo 300 nm, and above 1000)
r1 = r1[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
r2 = r2[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# plt.plot(in_spec[:,0], in_spec[:,1]/in_spec2[:,1])
print("\nMoving mean filters: ")
from astropy.convolution import convolve, Gaussian1DKernel
r1 = convolve(r1,kernel = Gaussian1DKernel(4246.6)) #Moving Mean filter by convolution. Kernel is Gaussian, input is sigma
# print("Done! \nFirst spectrum complete, continuing to second:\n")
r2 = convolve(r2,kernel = Gaussian1DKernel(4246.6)) #https://docs.astropy.org/en/stable/convolution/
print("MMF done\n")
plt.plot(wave, r1/r2)
return r1, r2, wave, delta
'''
# r5out, r5in, wave = the_thing(image, image2)
# ri = np.load('slit2/r'+str(1)+'/in.npy')
# ro = np.load('slit2/r'+str(1)+'/out.npy')
# rout = simfun.read_out(simfun.bin_sum(ro, sub_pixel))
# rin = simfun.read_out(simfun.bin_sum(ri, sub_pixel))
# pos = (disper[0]+slitpos[0]) #Position of each wavelength on the detector
# from scipy.stats import linregress
# a, b, r, p, s = linregress(pos, np.arange(wl_ran[0], wl_ran[1])) #Linear regression to find the lambda/pixel correlation
# wavelength = a*np.arange(img_size[1])+(b)
# wavelength = wavelength[0::10]
# wave = wavelength[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1] #remove outlying entries, where the spectrum is not present (belo 300 nm, and above 1000)
# r1 = rout[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# r2 = rin[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
# print("interpolating")
# r1, r2 = int_r(r1, r2, wl_ran) #Interpolate to higher resolution
# print(" done")
# autocor = scipy.signal.correlate(r1, r1, mode="same") #perform autocorr.
# cor = scipy.signal.correlate(r1, r2, mode="same") #Regular correlation
# first = np.argmax(autocor)
# second = np.argmax(cor)
# delta = first-second #amount of sub-pixels to move r1, for the two spectra to overlap
# print("MMF")
# r1 = np.roll(r1, delta) #Move r1
# from astropy.convolution import convolve, Gaussian1DKernel
# r1 = convolve(r1,kernel = Gaussian1DKernel(4246)) #Moving Mean filter by convolution. Kernel is Gaussian, input is sigma
# # print("Done! \nFirst spectrum complete, continuing to second:\n")
# r2 = convolve(r2,kernel = Gaussian1DKernel(4246)) #https://docs.astropy.org/en/stable/convolution/
# r1o = np.load('runs/s2/out-e300-j1_33-s9_406.npy')
# r1i = np.load('runs/s2/in-e300-j1_333-s9_406.npy')
# n1o = simfun.noise(size=r1o.shape, image=r1o)
# n1i = simfun.noise(size=r1i.shape, image=r1i)
# r1i = r1i+n1i
# r1o = r1o+n1o
# del n1o, n1i
# r1o, r1i, wave = the_thing(r1o, r1i)
# r2i = np.load('runs/s2/in-e300-j1_323-s9_406.npy')
# r2o = np.load('runs/s2/out-e300-j1_369-s9_406.npy')
# n2o = simfun.noise(size=r2o.shape, image=r2o)
# n2i = simfun.noise(size=r2i.shape, image=r2i)
# r2i = r2i+n2i
# r2o = r2o+n2o
# del n2o, n2i
# r2o, r2i, wave = the_thing(r2o, r2i)
# r3i = np.load('runs/s2/in-e300-j1_41-s9_406.npy')
# r3o = np.load('runs/s2/out-e300-j1_368-s9_406.npy')
# n3o = simfun.noise(size=r3o.shape, image=r3o)
# n3i = simfun.noise(size=r3i.shape, image=r3i)
# r3i = r3i+n3i
# r3o = r3o+n3o
# del n3o, n3i
# r3o, r3i, wave = the_thing(r3o, r3i)
# ri_s2 = np.vstack([r1i, r2i, r3i])
# ro_s2 = np.vstack([r1o, r2o, r3o])
# del r1i, r2i, r3i
# del r1o, r2o, r3o
# ri_s2_std = np.zeros((ri_s2.shape[1]))
# ro_s2_std = np.zeros((ro_s2.shape[1]))
# for i in range(len(ri_s2_std)):
# ri_s2_std[i] = np.std(ri_s2[:,i])
# ro_s2_std[i] = np.std(ro_s2[:,i])
# ro_s2_mean = np.zeros((ro_s2.shape[1]))
# ri_s2_mean = np.zeros((ri_s2.shape[1]))
# for i in range(ri_s2.shape[1]):
# ri_s2_mean[i] = np.mean(ri_s2[:,i])
# ro_s2_mean[i] = np.mean(ro_s2[:,i])
# plt.figure()
# plt.plot(wave, ro_s2_mean/ri_s2_mean)
# plt.plot(in_spec[:,0], in_spec[:,1]/in_spec2[:,1])
# # plt.fill_between(wave, ((ro_mean-ro_std)/(ri_mean-ri_std)), ((ro_mean+ro_std)/(ri_mean+ri_std)), alpha=0.2)
# plt.legend(["Output", "Input", "1 $\sigma$"], fontsize=14)
# plt.tick_params(labelsize=12)
# plt.xlabel("Wavelength [$nm$]", size=14)
# plt.ylabel("Ratio", size=14)
# plt.grid()
'''
r1i = np.load('runs/s1/in-e300-j1_346-s4_703.npy')
r1o = np.load('runs/s1/out-e300-j1_375-s4_703.npy')
n1o = simfun.noise(size=r1o.shape, image=r1o)
n1i = simfun.noise(size=r1i.shape, image=r1i)
r1i = r1i*CCD+n1i
r1o = r1o*CCD+n1o
del n1o, n1i
r1o, r1i, wave = the_thing(r1o, r1i)
r2i = np.load('runs/s1/in-e300-j1_37-s4_703.npy')
r2o = np.load('runs/s1/out-e300-j1_376-s4_703.npy')
n2o = simfun.noise(size=r2o.shape, image=r2o)
n2i = simfun.noise(size=r2i.shape, image=r2i)
r2i = r2i*CCD+n2i
r2o = r2o*CCD+n2o
del n2o, n2i
r2o, r2i, wave = the_thing(r2o, r2i)
r3i = np.load('runs/s1/in-e300-j1_394-s4_703.npy')
r3o = np.load('runs/s1/out-e300-j1_395-s4_703.npy')
n3o = simfun.noise(size=r3o.shape, image=r3o)
n3i = simfun.noise(size=r3i.shape, image=r3i)
r3i = r3i*CCD+n3i
r3o = r3o*CCD+n3o
del n3o, n3i
r3o, r3i, wave = the_thing(r3o, r3i)
r4i = np.load('runs/s1/in-e300-j1_399-s4_703.npy')
r4o = np.load('runs/s1/out-e300-j1_409-s4_703.npy')
n4o = simfun.noise(size=r4o.shape, image=r4o)
n4i = simfun.noise(size=r4i.shape, image=r4i)
r4i = r4i*CCD+n4i
r4o = r4o*CCD+n4o
del n4o, n4i
r4o, r4i, wave = the_thing(r4o, r4i)
ri = np.vstack([r1i, r2i, r3i, r4i])
ro = np.vstack([r1o, r2o, r3o, r4o])
del r1i, r2i, r3i, r4i
del r1o, r2o, r3o, r4o
ri_std = np.zeros((ri.shape[1]))
ro_std = np.zeros((ro.shape[1]))
for i in range(len(ri_std)):
ri_std[i] = np.std(ri[:,i])
ro_std[i] = np.std(ro[:,i])
ro_mean = np.zeros((ro.shape[1]))
ri_mean = np.zeros((ri.shape[1]))
for i in range(ri.shape[1]):
ri_mean[i] = np.mean(ri[:,i])
ro_mean[i] = np.mean(ro[:,i])
cm = 1/2.54
plt.figure(figsize=(19.5*cm, 13.8*cm))
plt.plot(wave, ro_mean/ri_mean)
plt.plot(in_spec[:,0], in_spec[:,1]/in_spec2[:,1])
plt.fill_between(wave, ((ro_mean-ro_std)/(ri_mean-ri_std)), ((ro_mean+ro_std)/(ri_mean+ri_std)), alpha=0.2)
plt.legend(["Output", "Input", "1 $\sigma$"], fontsize=12, loc="lower center")
plt.tick_params(labelsize=12)
plt.xlabel("Wavelength [$nm$]", size=12)
plt.ylabel("Ratio", size=12)
plt.grid()
plt.tight_layout()
#plt.savefig("../speciale_tex/fig/noise2.png", dpi=300)
=========================================
r1, r2 = int_r(rcout, rcin)
autocor = scipy.signal.correlate(r1, r1, mode="same")
cor = scipy.signal.correlate(r1, r2, mode="same")
first = np.argmax(autocor)
second = np.argmax(cor)
delta = first-second
r1 = np.roll(r1, delta)
from astropy.convolution import convolve, Gaussian1DKernel
r1 = convolve(r1,kernel = Gaussian1DKernel(5000))
r2 = convolve(r2,kernel = Gaussian1DKernel(5000)) #Moving Mean filter by convolution.
# r1 = scipy.ndimage.filters.uniform_filter1d(r1, size=5000)
# r2 = scipy.ndimage.filters.uniform_filter1d(r2, size=5000)
pos = (disper[0]+slitpos[0])*100
from scipy.stats import linregress
a, b, r, p, s = linregress(pos, np.arange(wl_ran[0], wl_ran[1]))
wavelength = a*np.arange(img_size[1]*100)+(b)
wave = wavelength[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
r1 = r1[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
r2 = r2[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-1]
plt.plot(in_spec[:,0], in_spec[:,1]/in_spec2[:,1])
plt.plot(wave, r1/r2)
=========================================
control_in = np.load('runs/in-e300-j1_357-s7_054.npy')
sumcin = simfun.bin_sum(control_in, 10)
rcin = simfun.read_out(sumcin)
control_out = np.load('runs/out-e300-j1_372-s7_054.npy')
sumcout = simfun.bin_sum(control_out, 10)
rcout = simfun.read_out(sumcout)
exp_out = np.load('runs/out-e600-j1_403-s7_054.npy')
sumexpout = simfun.bin_sum(exp_out, 10)
rexpout = simfun.read_out(sumexpout)
exp_in = np.load('runs/in-e600-j1_464-s7_054.npy')
sumexpin = simfun.bin_sum(exp_in, 10)
rexpin = simfun.read_out(sumexpin)
slit_in = np.load('runs/in-e300-j1_4-s49_381.npy')
sumslitin = simfun.bin_sum(slit_in, 10)
rslitin = simfun.read_out(sumslitin)
slit_out = np.load('runs/out-e300-j1_383-s49_381.npy')
sumslitout = simfun.bin_sum(slit_out, 10)
rslitout = simfun.read_out(sumslitout)
jit_out = np.load('runs/out-e300-j2_407-s7_054.npy')
sumjitout = simfun.bin_sum(jit_out, 10)
rjitout = simfun.read_out(sumjitout)
jit_in = np.load('runs/in-e300-j2_853-s7_054.npy')
sumjitin = simfun.bin_sum(jit_in, 10)
rjitin = simfun.read_out(sumjitin)
lam2 = (image2_wl+np.mean([wl_ran[0], wl_ran[1]])*10**(-80))/(image2+10**(-80)) #To find the dispersion wavelength (for calibrating later)
summed2 = simfun.bin_sum(image2, sub_pixel)
r2 = simfun.read_out(summed2)
from scipy.signal import find_peaks # load function to find spectral lines
peak_ref, _ = find_peaks(in_spec[:,1]*(-1), threshold=4, prominence=200, distance=3) #Finds most prominent peaks. Values found empirically (trial and error)
# rr = simfun.read_out(image)
peaks, _ = find_peaks(-1*r1,threshold=2.6e5, distance=3, prominence=500000) #Peaks of the readout. Again, trial and error with the values
from scipy.stats import linregress #Function to find a and b for a linear dispersion function. This of course only works as the dispersion is linear!
a, b, r, p, s = linregress(peaks, in_spec[:,0][peak_ref[1:]])
wavelength = a*np.arange(img_size[1]/10)+(b) #Making the new wavelength array for the readout values.
del a, b, r, p, s
r1 = r1[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-2]
wave = wavelength[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-2]
r2 = r2[np.max(np.where(wavelength<wl_ran[0]))+1:np.min(np.where(wavelength>wl_ran[1]))-2]
'''
"""
===============================================================
Plot bg_spec.png
===============================================================
bg_spec = np.loadtxt(inp.bg_spec)
if not bg_spec.shape[0] == wl_ran[1]-wl_ran[0]: #interpolate if values are missing
bg_spec = simfun.interp(x=bg_spec[:,0], y=bg_spec[:,1], wl_ran=wl_ran, kind='cubic', lowlim=wl_ran[0]-50, uplim=wl_ran[1]+50)
detector_area = (pl_arc_mm*img_size[0]/sub_pixel)*(pl_arc_mm*img_size[1]/sub_pixel) #Collecting area of the detector measured in arcsec^2
bg_spec[:,1] = bg_spec[:,1]*detector_area #Multiply by detector area
plt.figure()
plt.plot(bg_spec[:,0], bg_spec[:,1], linewidth=4)
plt.grid()
plt.xlabel("Wavelength [$nm$]", size=12)
plt.ylabel("Intensity [$photons$ $s^{-1}$ $nm^{-1}$]", size=12)
plt.tick_params(labelsize=12)
plt.tight_layout()
# plt.plot(wave, r1)
# plt.plot(wave, r2)
# plt.plot(in_spec[:,0], in_spec[:,1]*50000)
===============================================================
===============================================================
Plot eta_in.png
===============================================================
foo = 1
eta_in = inp.eta_in
plt.figure()
args = eta_in
for x in args: #takes several input arrays
loaded = np.loadtxt(x)
if not loaded.shape[0] == wl_ran[1]-wl_ran[0]: #if input is not of the correct length, this will interpolate
temp = simfun.interp(loaded[:,0], loaded[:,1], wl_ran=wl_ran, kind='cubic', lowlim=wl_ran[0]-50, uplim=wl_ran[1]+50)[:,1]
else:
temp = loaded
foo = foo * temp
plt.plot(np.arange(300, 1000), temp, linewidth=4)
eta_in=foo
plt.plot(np.arange(300,1000), eta_in, linewidth=4)
plt.legend(["CCD QE", "$\eta$ of telescope optics", "Total Efficiency Curve"], loc="lower center", fontsize=12)
plt.grid()
plt.xlabel("Wavelength [nm]", size=12)
plt.ylabel("Relative spectral throughput, $\eta$", size=12)
===============================================================
===============================================================
Plot ratio_s4_vs_s12.png
===============================================================
fig = plt.figure(figsize=(9,5))
ax1 = fig.add_subplot(211)
ax1.plot(wave, (roall4-riall4)/roall4)
plt.axis([ 265.9, 1035, -0.00333, 0.02540])
plt.ylabel("Normalized ratio", size=13)
plt.tick_params(labelbottom=False, labelsize=12)
plt.grid()
color=['tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan', 'tab:brown']
ax2 = fig.add_subplot(212)
for i in range(10):
ax2.plot(wave, (roall12[:,i]-riall12[:,i])/roall12[:,i], color=color[i])
plt.axis([ 265.9, 1035, -0.00333, 0.02540])
plt.xlabel("Wavelength [nm]", size=13)
plt.ylabel("Normalized ratio", size=13)
plt.tick_params(labelsize=12)
plt.grid()
plt.tight_layout()
===============================================================
Plot ratio_noise.png
===============================================================
rs= np.random.RandomState()
from datetime import datetime
print(str(datetime.now()))
rieccd = np.zeros((700000, 10))
roeccd = np.zeros((700000, 10))
for i in range(1,11):
ri = np.load('e300_bigpsf_noslit/r'+str(i)+'/in.npy')
ro = np.load('e300_bigpsf_noslit/r'+str(i)+'/out.npy')
no = rs.poisson(np.mean(ro), size=(1000, 10240))
ni = rs.poisson(np.mean(ri), size=(1000, 10240))
ro, ri, wave, delta = the_thing((ro+no)*CCD, (ri+ni)*CCD)
roeccd[:,i-1] = ro
rieccd[:,i-1] = ri
ro_meaneccd = np.zeros((ro.shape[0]))
ri_meaneccd = np.zeros((ri.shape[0]))
for i in range(ri.shape[0]):
ri_meaneccd[i] = np.mean(rieccd[i,:])
ro_meaneccd[i] = np.mean(roeccd[i,:])
print(str(datetime.now()))
plt.figure(figsize=(9,5))
plt.plot(wave, (ro_meane-ri_meane)/ro_meane, color="tab:red", linewidth=3, label="Larger PSF")
#plt.plot(wave, (ro_meaneccd-ri_meaneccd)/ro_meaneccd, color="tab:green", linewidth=3, label="CCD included")
plt.plot(wave, (ro_mean_ccdnoise-ri_mean_ccdnoise)/ro_mean_ccdnoise, color="tab:blue", linewidth=3, label="Noise and CCD included")
plt.plot(in_spec[:,0], (in_spec[:,1]-in_spec2[:,1])/in_spec[:,1], color="tab:orange", linewidth=3, label="Input spectra ratio")
#plt.axis([ 265.9, 1035, -0.00333, 0.02540])
plt.xlabel("Wavelength [nm]", size=13)
plt.ylabel("Transit depth", size=13)
plt.grid()
plt.tick_params(labelsize=12)
plt.legend(fontsize=12)
plt.tight_layout()
===============================================================
Plot ratio_noise_err.png
===============================================================
err = np.zeros((700000))
for i in range(700000):
err[i] = np.sqrt((1/10)* ((np.std(ro_ccdnoise[i,:])**2)/ro_mean_ccdnoise[i]**2 + (np.std(ri_ccdnoise[i,:])**2)/(ri_mean_ccdnoise[i]**2)))
div = (ro_mean_ccdnoise-ri_mean_ccdnoise)/ro_mean_ccdnoise
plt.figure(figsize=(9,5))
plt.plot(wave, (ro_meane-ri_meane)/ro_meane, color="tab:red", linewidth=3, label="Larger PSF")
#plt.plot(wave, (ro_meaneccd-ri_meaneccd)/ro_meaneccd, color="tab:green", linewidth=3, label="CCD included")
plt.plot(wave, (ro_mean_ccdnoise-ri_mean_ccdnoise)/ro_mean_ccdnoise, color="tab:blue", linewidth=3, label="Noise and CCD included")
plt.plot(in_spec[:,0], (in_spec[:,1]-in_spec2[:,1])/in_spec[:,1], color="tab:orange", linewidth=3, label="Input spectra ratio")
plt.fill_between(wave, div-err, div+err, color='k', alpha=0.2, label="Uncertainty")
#plt.plot(wave, div+err, 'k--', label="Uncertainty")
#plt.axis([ 265.9, 1035, -0.00333, 0.02540])
plt.xlabel("Wavelength [nm]", size=13)
plt.ylabel("Transit depth", size=13)
plt.grid()
plt.tick_params(labelsize=12)
plt.legend(fontsize=12)
plt.tight_layout()
"""
# print('Next image: \n')
# x_j, y_j = simfun.jitter(steps=int(exp*step), gain=0.4, amplitude_act=0.03, amplitude_sens=0.03) #New jitter, will have epx*step length
# jitter = np.stack((x_j, y_j), axis=-1)
# jitter =simfun.jitter_im(jitter[:,0], jitter[:,1], psf_size=(psf[:,:,0].shape[0], psf[:,:,0].shape[0]))
# image2, image2_wl=simfun.spatial_dispersion(wl_endpoints=wl_ran, jit_img=jitter, psf_img=psf, pos=slitpos, image_size=img_size,
# dispersion=disper, eff=spec_eff2, mask_img=mask, steps=1, plot='n')
# # image_all = image + background
# img1 = simfun.bin_sum(image1, bin_size=sub_pixel)
# img2 = simfun.bin_sum(image2, bin_size=sub_pixel)
# read_wo = simfun.read_out(img_summed,1)
# plt.plot(read_wo)
# img2_summed = simfun.bin_sum(image2, bin_size=sub_pixel)
# read_w = simfun.read_out(img2_summed,1)
# plt.plot(read_w)
# plt.plot(np.arange(0, 750)+115, in_spec2[:,1]/in_spec[:,1])
# plt.plot(read_no/read_w)
# plt.axis((105, 770, 0.989, 0.98932))
'''
peak_ref, _ = find_peaks(in_spec[:,1]*(-1), threshold=4, prominence=200, distance=3)
plt.figure()
plt.plot(in_spec[:,0], in_spec[:,1])
plt.plot(in_spec[:,0][peak_ref], in_spec[:,1][peak_ref], "x")
rr = simfun.read_out(image)
peaks, _ = find_peaks(-1*rr, distance=20, prominence=500000)
plt.figure()
plt.plot(rr)
plt.plot(peaks, rr[peaks], 'x')
from scipy.stats import linregress
a, b, r, p, s = linregress(peaks, in_spec[:,0][peak_ref])
wave = a*np.arange(0, 10240)+b
plt.figure()
plt.plot(wave, rr)
#THIS IS AS FAR AS YOU GOT WITH THE PLOTS. Quite a bit still needs to be done. Start by loading the image files you made yesterday
# figsize=(6.2, 9.1)
fig = plt.figure()
gs1 = fig.add_gridspec(nrows=6, ncols=2)
ax1 = fig.add_subplot(gs1[0,0])
ax2 = fig.add_subplot(gs1[1,0])
ax3 = fig.add_subplot(gs1[2,0])
ax4 = fig.add_subplot(gs1[3,0])
ax5 = fig.add_subplot(gs1[4,0])
ax6 = fig.add_subplot(gs1[5,0])
ax1.imshow(image1)
ax1.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax2.imshow(image3)
ax2.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax3.imshow(image4)
ax3.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax4.imshow(image5)
ax4.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax5.imshow(image6)
ax5.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax6.imshow(image7)
ax6.tick_params(labelcolor='w', top=False, bottom=False, left=False, right=False)
ax11 = fig.add_subplot(gs1[0,1])
ax11.plot(image1[125,:], 'r-')
ax11.yaxis.tick_right()
ax11.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax11.tick_params(labelsize=12)
ax1.plot(np.linspace(0,999, 1000), np.ones(1000)*125, 'r-')
ax21 = fig.add_subplot(gs1[1,1])
ax21.plot(image3[125,:], 'r-')
ax21.yaxis.tick_right()
ax21.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax21.tick_params(labelsize=12)
ax2.plot(np.linspace(0,999, 1000), np.ones(1000)*125, 'r-')
ax31 = fig.add_subplot(gs1[2,1])
ax31.plot(image4[125,:], 'r-')
ax31.axis([291, 530, 88.57, 88.75])
ax31.yaxis.tick_right()
ax31.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax31.tick_params(labelsize=12)
ax3.plot(np.linspace(0,999, 1000), np.ones(1000)*125, 'r-')
ax41 = fig.add_subplot(gs1[3,1])
ax41.plot(image5[:,100], 'r-')
ax41.yaxis.tick_right()
ax41.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax41.tick_params(labelsize=12)
ax4.plot(np.ones(1000)*125, np.linspace(0,249, 1000), 'r-')
ax51 = fig.add_subplot(gs1[4,1])
ax51.plot(image6[:,100], 'r-')
ax51.yaxis.tick_right()
ax51.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax51.tick_params(labelsize=12)
ax5.plot(np.ones(1000)*125, np.linspace(0,249, 1000), 'r-')
ax61 = fig.add_subplot(gs1[5,1])
ax61.plot(image7[150, :], 'r-')
ax61.yaxis.tick_right()
ax61.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False) # labels along the bottom edge are off
ax61.tick_params(labelsize=12)
ax6.plot(np.linspace(0,999, 1000), np.ones(1000)*150, 'r-')
ax1.text(-65, 140, "A", size=18)
ax2.text(-65, 140, "B", size=18)
ax3.text(-65, 140, "C", size=18)
ax4.text(-65, 140, "D", size=18)
ax5.text(-65, 140, "E", size=18)
ax6.text(-65, 140, "F", size=18)
# ax7 = fig.add_subplot(gs1[6,0], autoscale_on=False)
# ax7.plot(simfun.read_out(image7))
fig = plt.figure()
ax = fig.add_subplot(111, frameon=False)
ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
ax1 = fig.add_subplot(211)
ax2 = fig.add_subplot(212, sharex=ax1)
temp = simfun.interp(np.loadtxt('optics.txt')[:,0], np.loadtxt('optics.txt')[:,1], wl_ran=wl_ran, kind="cubic", lowlim=wl_ran[0]-50, uplim=wl_ran[1]+50)
ax1.plot(np.loadtxt('optics.txt')[:,0],np.loadtxt('optics.txt')[:,1], label="Input")
ax1.plot(temp[:,0], temp[:,1], label="Interpolated")
ax1.tick_params(length=6, width=1, labelsize=12)
ax1.axis((300, 1337, 0.853404, 1))
ax1.grid()
ax1.legend(fontsize=12, loc=4)
temp = simfun.interp(np.loadtxt('QE.txt')[:,0], np.loadtxt('QE.txt')[:,1], wl_ran=[400,1250], kind="quadratic", lowlim=400, uplim=1250)
ax2.plot(np.loadtxt('QE.txt')[:,0],np.loadtxt('QE.txt')[:,1], label="Input")
ax2.plot(temp[:,0],temp[:,1], label="Interpolated")
ax2.tick_params(length=6, width=1, labelsize=12)
ax2.grid()
ax2.legend(fontsize=12, loc=1)
ax2.set_xlabel("Wavelength [nm]", size=12)
#ax.text(0,0.5,"Spectral throughput, $\eta$")
ax.text(-0.12, 0.5, 'Spectral throughput, $\eta$', va='center', rotation='vertical', size=12)
plt.setp(ax1.get_xticklabels(), visible=False)
# x_j, y_j = simfun.jitter(steps=int(3000), gain=0.7, amplitude_act=0.15, amplitude_sens=0.15)
plt.figure(figsize=(6.4, 6.4))
plt.plot(x_j, y_j)
plt.plot(np.linspace(-5,5,100), np.ones(100)*-5, 'k-')
plt.plot(np.linspace(-5,5,100), np.ones(100)*5, 'k-')
plt.plot(np.ones(100)*5, np.linspace(-5,5,100), 'k-')
plt.plot(np.ones(100)*-5, np.linspace(-5,5,100), 'k-')
plt.grid()
plt.xlabel('Sub-pixel', size=12)
plt.ylabel('Sub-pixel', size=12)
plt.axis((-12,12,-12,12))
'''
|
[
"numpy.load",
"HIDRA.setup",
"HIDRA.transmission_spec_func",
"matplotlib.pyplot.plot",
"HIDRA.sinusoidal",
"HIDRA.prep_func",
"numpy.zeros",
"numpy.mean",
"random.randrange",
"numpy.random.normal",
"scipy.interpolate.interp1d",
"HIDRA.spatial_dispersion",
"datetime.datetime.now",
"numpy.sqrt"
] |
[((1284, 1298), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1296, 1298), False, 'from datetime import datetime\n'), ((4277, 4293), 'HIDRA.setup', 'HIDRA.setup', (['inp'], {}), '(inp)\n', (4288, 4293), False, 'import HIDRA\n'), ((4320, 4339), 'numpy.load', 'np.load', (['inp.in_CCD'], {}), '(inp.in_CCD)\n', (4327, 4339), True, 'import numpy as np\n'), ((4360, 4550), 'HIDRA.spatial_dispersion', 'HIDRA.spatial_dispersion', ([], {'wl_endpoints': 'wl_ran', 'jit_img': 'jitter', 'psf_ends': '[15, 45]', 'pos': 'slitpos', 'image_size': 'img_size', 'dispersion': 'disper', 'eff': 'spec_eff2', 'mask_img': 'mask', 'steps': '(1)', 'plot': '"""n"""'}), "(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15,\n 45], pos=slitpos, image_size=img_size, dispersion=disper, eff=spec_eff2,\n mask_img=mask, steps=1, plot='n')\n", (4384, 4550), False, 'import HIDRA\n'), ((4591, 4638), 'HIDRA.prep_func', 'HIDRA.prep_func', (['image2', 'CCD', 'sub_pixel', 'wl_ran'], {}), '(image2, CCD, sub_pixel, wl_ran)\n', (4606, 4638), False, 'import HIDRA\n'), ((4662, 4751), 'HIDRA.transmission_spec_func', 'HIDRA.transmission_spec_func', (['spectrum1', 'spectrum2', 'wl_ran', 'disper', 'slitpos', 'img_size'], {}), '(spectrum1, spectrum2, wl_ran, disper, slitpos,\n img_size)\n', (4690, 4751), False, 'import HIDRA\n'), ((4931, 4961), 'matplotlib.pyplot.plot', 'plt.plot', (['wave', '((ro - ri) / ro)'], {}), '(wave, (ro - ri) / ro)\n', (4939, 4961), True, 'import matplotlib.pyplot as plt\n'), ((499, 548), 'scipy.interpolate.interp1d', 'interp1d', (['x', 'r1'], {'kind': '(3)', 'fill_value': '"""extrapolate"""'}), "(x, r1, kind=3, fill_value='extrapolate')\n", (507, 548), False, 'from scipy.interpolate import interp1d\n'), ((558, 607), 'scipy.interpolate.interp1d', 'interp1d', (['x', 'r2'], {'kind': '(3)', 'fill_value': '"""extrapolate"""'}), "(x, r2, kind=3, fill_value='extrapolate')\n", (566, 607), False, 'from scipy.interpolate import interp1d\n'), ((1661, 1677), 'HIDRA.setup', 'HIDRA.setup', (['inp'], {}), '(inp)\n', (1672, 1677), False, 'import HIDRA\n'), ((2239, 2325), 'HIDRA.sinusoidal', 'HIDRA.sinusoidal', ([], {'size': '(inp.exp * inp.step)', 'frequency': 'freq', 'amplitude': 'ampl', 'phase': '(1)'}), '(size=inp.exp * inp.step, frequency=freq, amplitude=ampl,\n phase=1)\n', (2255, 2325), False, 'import HIDRA\n'), ((2698, 2784), 'HIDRA.sinusoidal', 'HIDRA.sinusoidal', ([], {'size': '(inp.exp * inp.step)', 'frequency': 'freq', 'amplitude': 'ampl', 'phase': '(1)'}), '(size=inp.exp * inp.step, frequency=freq, amplitude=ampl,\n phase=1)\n', (2714, 2784), False, 'import HIDRA\n'), ((3510, 3529), 'numpy.load', 'np.load', (['inp.in_CCD'], {}), '(inp.in_CCD)\n', (3517, 3529), True, 'import numpy as np\n'), ((3629, 3818), 'HIDRA.spatial_dispersion', 'HIDRA.spatial_dispersion', ([], {'wl_endpoints': 'wl_ran', 'jit_img': 'jitter', 'psf_ends': '[15, 45]', 'pos': 'slitpos', 'image_size': 'img_size', 'dispersion': 'disper', 'eff': 'spec_eff', 'mask_img': 'mask', 'steps': '(1)', 'plot': '"""n"""'}), "(wl_endpoints=wl_ran, jit_img=jitter, psf_ends=[15,\n 45], pos=slitpos, image_size=img_size, dispersion=disper, eff=spec_eff,\n mask_img=mask, steps=1, plot='n')\n", (3653, 3818), False, 'import HIDRA\n'), ((3872, 3919), 'HIDRA.prep_func', 'HIDRA.prep_func', (['image1', 'CCD', 'sub_pixel', 'wl_ran'], {}), '(image1, CCD, sub_pixel, wl_ran)\n', (3887, 3919), False, 'import HIDRA\n'), ((18981, 18998), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (18989, 18998), True, 'import numpy as np\n'), ((339, 354), 'numpy.mean', 
'np.mean', (['(x ** 2)'], {}), '(x ** 2)\n', (346, 354), True, 'import numpy as np\n'), ((5367, 5381), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5379, 5381), False, 'from datetime import datetime\n'), ((1875, 1900), 'random.randrange', 'random.randrange', (['(0)', '(5)', '(1)'], {}), '(0, 5, 1)\n', (1891, 1900), False, 'import random\n'), ((1907, 1933), 'random.randrange', 'random.randrange', (['(0)', '(10)', '(1)'], {}), '(0, 10, 1)\n', (1923, 1933), False, 'import random\n'), ((1940, 1967), 'random.randrange', 'random.randrange', (['(0)', '(100)', '(1)'], {}), '(0, 100, 1)\n', (1956, 1967), False, 'import random\n'), ((1974, 2002), 'random.randrange', 'random.randrange', (['(0)', '(1000)', '(1)'], {}), '(0, 1000, 1)\n', (1990, 2002), False, 'import random\n'), ((2009, 2038), 'random.randrange', 'random.randrange', (['(0)', '(10000)', '(1)'], {}), '(0, 10000, 1)\n', (2025, 2038), False, 'import random\n'), ((2057, 2082), 'random.randrange', 'random.randrange', (['(0)', '(1)', '(1)'], {}), '(0, 1, 1)\n', (2073, 2082), False, 'import random\n'), ((2089, 2115), 'random.randrange', 'random.randrange', (['(0)', '(10)', '(1)'], {}), '(0, 10, 1)\n', (2105, 2115), False, 'import random\n'), ((2122, 2149), 'random.randrange', 'random.randrange', (['(0)', '(100)', '(1)'], {}), '(0, 100, 1)\n', (2138, 2149), False, 'import random\n'), ((2156, 2184), 'random.randrange', 'random.randrange', (['(0)', '(1000)', '(1)'], {}), '(0, 1000, 1)\n', (2172, 2184), False, 'import random\n'), ((2191, 2220), 'random.randrange', 'random.randrange', (['(0)', '(10000)', '(1)'], {}), '(0, 10000, 1)\n', (2207, 2220), False, 'import random\n'), ((2334, 2359), 'random.randrange', 'random.randrange', (['(0)', '(5)', '(1)'], {}), '(0, 5, 1)\n', (2350, 2359), False, 'import random\n'), ((2366, 2392), 'random.randrange', 'random.randrange', (['(0)', '(10)', '(1)'], {}), '(0, 10, 1)\n', (2382, 2392), False, 'import random\n'), ((2399, 2426), 'random.randrange', 'random.randrange', (['(0)', '(100)', '(1)'], {}), '(0, 100, 1)\n', (2415, 2426), False, 'import random\n'), ((2433, 2461), 'random.randrange', 'random.randrange', (['(0)', '(1000)', '(1)'], {}), '(0, 1000, 1)\n', (2449, 2461), False, 'import random\n'), ((2468, 2497), 'random.randrange', 'random.randrange', (['(0)', '(10000)', '(1)'], {}), '(0, 10000, 1)\n', (2484, 2497), False, 'import random\n'), ((2516, 2541), 'random.randrange', 'random.randrange', (['(0)', '(1)', '(1)'], {}), '(0, 1, 1)\n', (2532, 2541), False, 'import random\n'), ((2548, 2574), 'random.randrange', 'random.randrange', (['(0)', '(10)', '(1)'], {}), '(0, 10, 1)\n', (2564, 2574), False, 'import random\n'), ((2581, 2608), 'random.randrange', 'random.randrange', (['(0)', '(100)', '(1)'], {}), '(0, 100, 1)\n', (2597, 2608), False, 'import random\n'), ((2615, 2643), 'random.randrange', 'random.randrange', (['(0)', '(1000)', '(1)'], {}), '(0, 1000, 1)\n', (2631, 2643), False, 'import random\n'), ((2650, 2679), 'random.randrange', 'random.randrange', (['(0)', '(10000)', '(1)'], {}), '(0, 10000, 1)\n', (2666, 2679), False, 'import random\n'), ((19116, 19138), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {}), '(0, 1)\n', (19132, 19138), True, 'import numpy as np\n'), ((19095, 19111), 'numpy.sqrt', 'np.sqrt', (['x[i, j]'], {}), '(x[i, j])\n', (19102, 19111), True, 'import numpy as np\n')]
|
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import training
from chainer.training import extensions
import numpy as np
import random
# Preprocessing
# The structure follows this reference: https://github.com/yasunorikudo/chainer-DenseNet
class Preprocess(chainer.dataset.DatasetMixin):
def __init__(self, pairs):
self.pairs = pairs
def __len__(self):
return len(self.pairs)
def get_example(self, i):
x, y = self.pairs[i]
# label
y = np.array(y, dtype=np.int32)
# random crop
pad_x = np.zeros((3, 40, 40), dtype=np.float32)
pad_x[:, 4:36, 4:36] = x
top = random.randint(0, 8)
left = random.randint(0, 8)
x = pad_x[:, top:top+32, left:left+32]
# horizontal flip
if random.randint(0, 1):
x = x[:, :, ::-1]
return x, y
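# Rough usage sketch (added for illustration, not part of the original script): Preprocess wraps the
# raw CIFAR-10 pairs so that every access applies pad-to-40 -> random 32x32 crop -> random horizontal
# flip, mirroring what main() does further below:
#   raw_train, _ = chainer.datasets.get_cifar10()
#   augmented = Preprocess(raw_train)
#   x0, y0 = augmented.get_example(0)   # x0 has shape (3, 32, 32), y0 is an int32 label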
# Model
# A single ResBlock
class ResBlock(chainer.Chain):
    # Shortcut around BN->ReLU->Conv->BN->ReLU->Conv (the pre-activation design studied by Kaiming He et al.)
def __init__(self, channels):
super().__init__()
self.channels = channels
w = chainer.initializers.GlorotNormal()
with self.init_scope():
self.bn1 = L.BatchNormalization(channels)
self.conv1 = L.Convolution2D(None, channels, ksize=3, pad=1, initialW=w)
self.bn2 = L.BatchNormalization(channels)
self.conv2 = L.Convolution2D(None, channels, ksize=3, pad=1, initialW=w)
def __call__(self, x):
out = self.conv1(F.relu(self.bn1(x)))
out = self.conv2(F.relu(self.bn2(out)))
return out + x
# Following the original paper, subsampling uses a stride-2 convolution instead of pooling
class Subsumpling(chainer.Chain):
def __init__(self, output_channels):
super().__init__()
w = chainer.initializers.GlorotNormal()
with self.init_scope():
self.conv = L.Convolution2D(None, output_channels, ksize=1, stride=2, initialW=w)
def __call__(self, x):
return self.conv(x)
class ResNet(chainer.Chain):
def __init__(self, n):
super().__init__()
self.n = n
w = chainer.initializers.GlorotNormal()
with self.init_scope():
self.conv1 = L.Convolution2D(3, 16, ksize=3, pad=1, initialW=w) #3->16
self.rbs1 = self._make_resblocks(16, n)
self.pool1 = Subsumpling(32)
self.rbs2 = self._make_resblocks(32, n)
self.pool2 = Subsumpling(64)
self.rbs3 = self._make_resblocks(64, n)
self.fc = L.Linear(None, 10, initialW=w)
def _make_resblocks(self, channels, count):
layers = [ResBlock(channels) for i in range(count)]
return chainer.Sequential(*layers)
def __call__(self, x):
out = self.conv1(x)
out = self.rbs1(out)
out = self.pool1(out)
out = self.rbs2(out)
out = self.pool2(out)
out = self.rbs3(out)
        out = F.average_pooling_2d(out, ksize=8) # the feature map is (8, 8) at this point
out = self.fc(out)
return out
def main(n, nb_epochs):
train, test = chainer.datasets.get_cifar10()
train = Preprocess(train)
test = Preprocess(test)
train_iter = chainer.iterators.SerialIterator(train, 128)
test_iter = chainer.iterators.SerialIterator(test, 100, repeat=False, shuffle=False)
### Parameters
device = 0 # -1:CPU, 0:GPU
###
net = chainer.links.Classifier(ResNet(n))
optimizer = chainer.optimizers.MomentumSGD(lr=0.01, momentum=0.9)
optimizer.setup(net)
optimizer.add_hook(chainer.optimizer.WeightDecay(0.0005))
updater = training.StandardUpdater(train_iter, optimizer, device=device)
trainer = training.Trainer(updater, (nb_epochs, "epoch"), out=f"chainer_n{n}")
val_interval = (1, "epoch")
log_interval = (1, "epoch")
    # Learning-rate schedule
def lr_shift():
if updater.epoch == int(nb_epochs*0.5) or updater.epoch == int(nb_epochs*0.75):
optimizer.lr *= 0.1
return optimizer.lr
trainer.extend(extensions.Evaluator(test_iter, net, device=device), trigger=val_interval)
trainer.extend(extensions.observe_value(
"lr", lambda _: lr_shift()), trigger=(1, "epoch"))
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.PrintReport([
'elapsed_time', 'epoch', 'iteration', 'main/loss', 'validation/main/loss',
'main/accuracy', 'validation/main/accuracy', 'lr',
]), trigger=log_interval)
trainer.extend(extensions.ProgressBar(update_interval=50))
trainer.run()
if __name__ == "__main__":
main(3, 1)
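# Note (added): main(3, 1) builds the n=3 network (three ResBlocks, i.e. six convolutions, per stage)
# and runs a single epoch, which is mainly useful as a quick smoke test; increase nb_epochs for a
# real training run.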
|
[
"chainer.iterators.SerialIterator",
"chainer.training.extensions.LogReport",
"chainer.links.Linear",
"random.randint",
"chainer.initializers.GlorotNormal",
"chainer.training.extensions.Evaluator",
"chainer.functions.average_pooling_2d",
"chainer.Sequential",
"chainer.training.StandardUpdater",
"chainer.links.Convolution2D",
"chainer.optimizer.WeightDecay",
"chainer.training.extensions.PrintReport",
"chainer.datasets.get_cifar10",
"chainer.training.Trainer",
"chainer.optimizers.MomentumSGD",
"numpy.zeros",
"chainer.training.extensions.ProgressBar",
"numpy.array",
"chainer.links.BatchNormalization"
] |
[((3139, 3169), 'chainer.datasets.get_cifar10', 'chainer.datasets.get_cifar10', ([], {}), '()\n', (3167, 3169), False, 'import chainer\n'), ((3248, 3292), 'chainer.iterators.SerialIterator', 'chainer.iterators.SerialIterator', (['train', '(128)'], {}), '(train, 128)\n', (3280, 3292), False, 'import chainer\n'), ((3310, 3382), 'chainer.iterators.SerialIterator', 'chainer.iterators.SerialIterator', (['test', '(100)'], {'repeat': '(False)', 'shuffle': '(False)'}), '(test, 100, repeat=False, shuffle=False)\n', (3342, 3382), False, 'import chainer\n'), ((3514, 3567), 'chainer.optimizers.MomentumSGD', 'chainer.optimizers.MomentumSGD', ([], {'lr': '(0.01)', 'momentum': '(0.9)'}), '(lr=0.01, momentum=0.9)\n', (3544, 3567), False, 'import chainer\n'), ((3674, 3736), 'chainer.training.StandardUpdater', 'training.StandardUpdater', (['train_iter', 'optimizer'], {'device': 'device'}), '(train_iter, optimizer, device=device)\n', (3698, 3736), False, 'from chainer import training\n'), ((3752, 3820), 'chainer.training.Trainer', 'training.Trainer', (['updater', "(nb_epochs, 'epoch')"], {'out': 'f"""chainer_n{n}"""'}), "(updater, (nb_epochs, 'epoch'), out=f'chainer_n{n}')\n", (3768, 3820), False, 'from chainer import training\n'), ((515, 542), 'numpy.array', 'np.array', (['y'], {'dtype': 'np.int32'}), '(y, dtype=np.int32)\n', (523, 542), True, 'import numpy as np\n'), ((585, 624), 'numpy.zeros', 'np.zeros', (['(3, 40, 40)'], {'dtype': 'np.float32'}), '((3, 40, 40), dtype=np.float32)\n', (593, 624), True, 'import numpy as np\n'), ((674, 694), 'random.randint', 'random.randint', (['(0)', '(8)'], {}), '(0, 8)\n', (688, 694), False, 'import random\n'), ((711, 731), 'random.randint', 'random.randint', (['(0)', '(8)'], {}), '(0, 8)\n', (725, 731), False, 'import random\n'), ((819, 839), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (833, 839), False, 'import random\n'), ((1128, 1163), 'chainer.initializers.GlorotNormal', 'chainer.initializers.GlorotNormal', ([], {}), '()\n', (1161, 1163), False, 'import chainer\n'), ((1812, 1847), 'chainer.initializers.GlorotNormal', 'chainer.initializers.GlorotNormal', ([], {}), '()\n', (1845, 1847), False, 'import chainer\n'), ((2166, 2201), 'chainer.initializers.GlorotNormal', 'chainer.initializers.GlorotNormal', ([], {}), '()\n', (2199, 2201), False, 'import chainer\n'), ((2746, 2773), 'chainer.Sequential', 'chainer.Sequential', (['*layers'], {}), '(*layers)\n', (2764, 2773), False, 'import chainer\n'), ((3000, 3034), 'chainer.functions.average_pooling_2d', 'F.average_pooling_2d', (['out'], {'ksize': '(8)'}), '(out, ksize=8)\n', (3020, 3034), True, 'import chainer.functions as F\n'), ((3618, 3655), 'chainer.optimizer.WeightDecay', 'chainer.optimizer.WeightDecay', (['(0.0005)'], {}), '(0.0005)\n', (3647, 3655), False, 'import chainer\n'), ((4098, 4149), 'chainer.training.extensions.Evaluator', 'extensions.Evaluator', (['test_iter', 'net'], {'device': 'device'}), '(test_iter, net, device=device)\n', (4118, 4149), False, 'from chainer.training import extensions\n'), ((4299, 4341), 'chainer.training.extensions.LogReport', 'extensions.LogReport', ([], {'trigger': 'log_interval'}), '(trigger=log_interval)\n', (4319, 4341), False, 'from chainer.training import extensions\n'), ((4363, 4517), 'chainer.training.extensions.PrintReport', 'extensions.PrintReport', (["['elapsed_time', 'epoch', 'iteration', 'main/loss', 'validation/main/loss',\n 'main/accuracy', 'validation/main/accuracy', 'lr']"], {}), "(['elapsed_time', 'epoch', 'iteration', 'main/loss',\n 
'validation/main/loss', 'main/accuracy', 'validation/main/accuracy', 'lr'])\n", (4385, 4517), False, 'from chainer.training import extensions\n'), ((4583, 4625), 'chainer.training.extensions.ProgressBar', 'extensions.ProgressBar', ([], {'update_interval': '(50)'}), '(update_interval=50)\n', (4605, 4625), False, 'from chainer.training import extensions\n'), ((1231, 1261), 'chainer.links.BatchNormalization', 'L.BatchNormalization', (['channels'], {}), '(channels)\n', (1251, 1261), True, 'import chainer.links as L\n'), ((1288, 1347), 'chainer.links.Convolution2D', 'L.Convolution2D', (['None', 'channels'], {'ksize': '(3)', 'pad': '(1)', 'initialW': 'w'}), '(None, channels, ksize=3, pad=1, initialW=w)\n', (1303, 1347), True, 'import chainer.links as L\n'), ((1372, 1402), 'chainer.links.BatchNormalization', 'L.BatchNormalization', (['channels'], {}), '(channels)\n', (1392, 1402), True, 'import chainer.links as L\n'), ((1429, 1488), 'chainer.links.Convolution2D', 'L.Convolution2D', (['None', 'channels'], {'ksize': '(3)', 'pad': '(1)', 'initialW': 'w'}), '(None, channels, ksize=3, pad=1, initialW=w)\n', (1444, 1488), True, 'import chainer.links as L\n'), ((1916, 1985), 'chainer.links.Convolution2D', 'L.Convolution2D', (['None', 'output_channels'], {'ksize': '(1)', 'stride': '(2)', 'initialW': 'w'}), '(None, output_channels, ksize=1, stride=2, initialW=w)\n', (1931, 1985), True, 'import chainer.links as L\n'), ((2263, 2313), 'chainer.links.Convolution2D', 'L.Convolution2D', (['(3)', '(16)'], {'ksize': '(3)', 'pad': '(1)', 'initialW': 'w'}), '(3, 16, ksize=3, pad=1, initialW=w)\n', (2278, 2313), True, 'import chainer.links as L\n'), ((2587, 2617), 'chainer.links.Linear', 'L.Linear', (['None', '(10)'], {'initialW': 'w'}), '(None, 10, initialW=w)\n', (2595, 2617), True, 'import chainer.links as L\n')]
|
#!/usr/bin/env python
import numpy as np
from ropy.robot.ETS import ETS
from pathlib import Path
class PandaURDF(ETS):
def __init__(self):
fpath = Path('ropy') / 'models' / 'xacro' / 'panda' / 'robots'
fname = 'panda_arm_hand.urdf.xacro'
abspath = fpath.absolute()
args = super(PandaURDF, self).urdf_to_ets_args(
(abspath / fname).as_posix())
super(PandaURDF, self).__init__(
args[0],
name=args[1])
self.manufacturer = '<NAME>'
self.ee_link = self.ets[9]
self._qz = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0])
self._qr = np.array([0, -0.3, 0, -2.2, 0, 2.0, np.pi/4, 0, 0])
# self._qz = np.array([0, 0, 0, 0, 0, 0, 0])
# self._qr = np.array([0, -0.3, 0, -2.2, 0, 2.0, np.pi/4])
for link in self.ets:
for gi in link.geometry:
if gi.filename[0] != '/':
gi.filename = (abspath / gi.filename).as_posix()
# print(link.name)
# print(link.geometry)
@property
def qz(self):
return self._qz
@property
def qr(self):
return self._qr
|
[
"pathlib.Path",
"numpy.array"
] |
[((579, 616), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0])\n', (587, 616), True, 'import numpy as np\n'), ((636, 689), 'numpy.array', 'np.array', (['[0, -0.3, 0, -2.2, 0, 2.0, np.pi / 4, 0, 0]'], {}), '([0, -0.3, 0, -2.2, 0, 2.0, np.pi / 4, 0, 0])\n', (644, 689), True, 'import numpy as np\n'), ((164, 176), 'pathlib.Path', 'Path', (['"""ropy"""'], {}), "('ropy')\n", (168, 176), False, 'from pathlib import Path\n')]
|
# from @有一种悲伤叫颓废
"""
注:
1. 主要用来求三角剖分和维诺图,算法的思路可以看我的这期视频:https://www.bilibili.com/video/BV1Ck4y1z7VT
2. 时间复杂度O(nlogn),一般情况应该够用,如发现bug请联系颓废
3. 只需导入两个函数:DelaunayTrianglation(求德劳内三角剖分), Voronoi(求维诺图)
"""
import numpy as np
from manimlib.mobject.types.vectorized_mobject import VGroup
from manimlib.constants import PI
from manimlib.utils.config_ops import digest_config
from manimlib.mobject.geometry import Dot, Line, Polygon
from manimlib.scene.scene import Scene
from manimlib.utils.space_ops import normalize
import time  # needed by the test Scene at the bottom (time.perf_counter)
#import math
#from manimlib.imports import *
#from manim_sandbox.utils.imports import *
# The tolerances below are best left alone; making the precision either larger or smaller may cause bugs
# Two points are considered equal when their distance is below the tolerance
ev = np.exp(1)**PI/1000000000
ev_sq = ev**2
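# For reference (added): e**PI is roughly 23.14, so ev is about 2.31e-8 and ev_sq about 5.4e-16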
# "Infinity": a large coordinate used for the unbounded bounding triangle
Infinity = 333
# Whether two points are equal: equal if the squared distance is below the squared tolerance, O(1)
def point_is_equal(p, q):
p, q = np.array(p), np.array(q)
    # Equal if the squared distance between the two points is less than the squared tolerance
if np.dot(q-p, q-p) < ev_sq:
return True
return False
# Positive if b lies to the left of vector pq, negative if to the right, O(1)
def cross2(p, q, b):
'''
    Cross-product (ToLeft) determinant formula
\begin{align}
ToLeft(p, q, b)=\begin{vmatrix}
x_p & y_p & 1\\
x_q & y_q & 1\\
x_b & y_b & 1\\
\end{vmatrix}\end{align}
'''
return p[0]*q[1] - p[1]*q[0] + \
q[0]*b[1] - q[1]*b[0] + \
b[0]*p[1] - b[1]*p[0]
# Tolerance-aware ToLeft: positive if b is left of vector pq, negative if right, zero if (almost) collinear, O(1)
def ToLeft(p, q, b):
a = cross2(p, q, b)
    # Below the tolerance: treat b as lying on the line through pq
if abs(a) < ev:
return 0
    # implicit else: abs(a) >= ev
return a
# Returns True if point d lies inside triangle pqb, O(1)
def InTriangle(p, q, b, d):
tl1 = ToLeft(p, q, d)
if abs(tl1) < ev:
tl2 = ToLeft(q, b, d)
tl3 = ToLeft(b, p, d)
if tl2 < ev and tl3 < ev or tl2 > -ev and tl3 > -ev:
return True
return False
if tl1 > ev:
if ToLeft(q, b, d) > -ev and ToLeft(b, p, d) > -ev:
return True
return False
if tl1 < -ev:
if ToLeft(q, b, d) < ev and ToLeft(b, p, d) < ev:
return True
return False
# Returns True if point d lies inside the circumcircle of p, q, b, O(1)
def InCircle(p, q, b, d):
'''
    Relation of a point to the circle through three points (in-circle test)
\begin{align}
InCircle(p, q, b, d)=\begin{vmatrix}
x_p & y_p & x_p^2+y_p^2 & 1\\
x_q & y_q & x_q^2+y_q^2 & 1\\
x_b & y_b & x_b^2+y_b^2 & 1\\
x_d & y_d & x_d^2+y_d^2 & 1\\
\end{vmatrix}\end{align}
'''
a13 = p[0]**2+p[1]**2
a23 = q[0]**2+q[1]**2
a33 = b[0]**2+b[1]**2
a43 = d[0]**2+d[1]**2
det = np.linalg.det([
[p[0], p[1], a13, 1],
[q[0], q[1], a23, 1],
[b[0], b[1], a33, 1],
[d[0], d[1], a43, 1],
])
if det < -ev:
return True
return False
# Circumcenter of three points, O(1)
def CircumcircleCenter(p, q, b):
'''
\begin{align}
    &\text{Circumcenter formula for three points}\\
&x=\frac{1}{2}\begin{vmatrix}
1 & x_p^2+y_p^2 & y_p\\
1 & x_q^2+y_q^2 & y_q\\
1 & x_b^2+y_b^2 & y_b\\
\end{vmatrix}/\begin{vmatrix}
1 & x_p & y_p\\
1 & x_q & y_q\\
1 & x_b & y_b\\
\end{vmatrix}\\
&y=\frac{1}{2}\begin{vmatrix}
1 & x_p & x_p^2+y_p^2\\
1 & x_q & x_q^2+y_q^2\\
1 & x_b & x_b^2+y_b^2\\
\end{vmatrix}/\begin{vmatrix}
1 & x_p & y_p\\
1 & x_q & y_q\\
1 & x_b & y_b\\
\end{vmatrix}
\end{align}
'''
a1 = p[0]**2+p[1]**2
a2 = q[0]**2+q[1]**2
a3 = b[0]**2+b[1]**2
det1 = np.linalg.det([
[1, p[0], p[1]],
[1, q[0], q[1]],
[1, b[0], b[1]],
])
if det1 == 0:
print("三点共线")
return None
det2 = np.linalg.det([
[1, a1, p[1]],
[1, a2, q[1]],
[1, a3, b[1]],
])
det3 = np.linalg.det([
[1, p[0], a1],
[1, q[0], a2],
[1, b[0], a3],
])
return np.array([det2/det1, det3/det1, 0])/2
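# Illustrative check of CircumcircleCenter (added): for the right triangle (0,0), (2,0), (0,2) the
# circumcenter is the midpoint of the hypotenuse, so the function returns approximately (1, 1, 0).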
# Face
class Face():
def __init__(self, halfedge):
        # visited flag for this face
self.Visit = False
        # one half-edge belonging to this face
self.HalfEdge = halfedge
        # the bucket associated with this face
self.Bucket = None
        # circumcircle center, used when building the Voronoi diagram
self.Center = None
# Vertex
class Vertice():
def __init__(self, point):
        # vertex coordinates
self.Point = point
        # one half-edge emanating from this vertex
self.HalfEdge = None
# Half-edge
class HalfEdge():
def __init__(self, start, end):
        # visited flag
self.Visit = False
        # start vertex of the edge
self.Start = start
        # end vertex of the edge
self.End = end
        # twin half-edge
self.Twin = None
        # face this half-edge belongs to
self.Face = None
        # predecessor edge
self.Pre = None
        # successor edge
self.Suc = None
# Bucket
class Bucket():
def __init__(self, points):
        # points held by this bucket
self.Points = points
        # face associated with this bucket
self.Face = None
# Initialize the "infinite" bounding net, O(1)
def InitInfNet(points = None):
    # Initialize the points at infinity
    # counter-clockwise
infv1 = Vertice(np.array([Infinity, 0, 0]))
infv2 = Vertice(np.array([0, Infinity, 0]))
infv3 = Vertice(np.array([-Infinity, -Infinity, 0]))
    # Initialize the half-edges at infinity
halfedge1 = HalfEdge(infv1, infv2)
halfedge2 = HalfEdge(infv2, infv3)
halfedge3 = HalfEdge(infv3, infv1)
    # Initialize the half-edge emanating from each vertex
infv1.HalfEdge = halfedge1
infv2.HalfEdge = halfedge2
infv3.HalfEdge = halfedge3
    # Initialize the infinite face
face1 = Face(halfedge1)
    # Initialize predecessor, successor, and face of the infinite half-edges
halfedge1.Pre = halfedge3
halfedge1.Suc = halfedge2
halfedge1.Face = face1
halfedge2.Pre = halfedge1
halfedge2.Suc = halfedge3
halfedge2.Face = face1
halfedge3.Pre = halfedge2
halfedge3.Suc = halfedge1
halfedge3.Face = face1
    # Initialize the bucket; this bucket holds all the points
bucket1 = Bucket(points)
bucket1.Face = face1
    # bucket associated with the face
face1.Bucket = bucket1
return face1
# Signed area of a polygon: for a simple (non-self-intersecting) polygon, positive means counter-clockwise and negative means clockwise; zero needs special care, O(n)
def get_polygon_directed_area(polygon):
a = polygon.get_vertices()
l = len(a)
return 1 / 2 * sum([a[i][0] * a[(i + 1) % l][1] - a[(i + 1) % l][0] * a[i][1] for i in range(l)])
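# Sign-convention sanity check (added): the counter-clockwise unit square (0,0)->(1,0)->(1,1)->(0,1)
# gives 1/2 * [(0*0-1*0) + (1*1-1*0) + (1*1-0*1) + (0*0-0*1)] = +1, while the clockwise traversal gives -1.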
# Edge flip, O(1)
def EdgeFlipping(halfedge):
    # record the face's old visit value
visitvalue = halfedge.Face.Visit
    # vertices of the quadrilateral around the edge to be flipped
v1 = halfedge.Start
v2 = halfedge.Twin.Suc.End
v3 = halfedge.End
v4 = halfedge.Suc.End
    # coordinates of the vertices
p1 = v1.Point
p2 = v2.Point
p3 = v3.Point
p4 = v4.Point
    # edges of the quadrilateral around the edge to be flipped; ei emanates from vi
e1 = halfedge.Twin.Suc
e2 = halfedge.Twin.Pre
e3 = halfedge.Suc
e4 = halfedge.Pre
    # repoint each vertex at an edge that is not being flipped (an edge of the quadrilateral)
v1.HalfEdge = e1
v2.HalfEdge = e2
v3.HalfEdge = e3
v4.HalfEdge = e4
    # points from the two buckets of the quadrilateral around the edge to be flipped
oldpoints = [*halfedge.Face.Bucket.Points, *halfedge.Twin.Face.Bucket.Points]
    # redistribute the points into new buckets
newpoints1, newpoints2 = [], []
for oldpoint in oldpoints:
if InTriangle(p1, p2, p4, oldpoint):
newpoints1.append(oldpoint)
else:
newpoints2.append(oldpoint)
    # newly constructed faces, counter-clockwise
newface1, newface2 = Face(e1), Face(e2)
newface1.Visit = visitvalue
newface2.Visit = visitvalue
    # construct the flipped edge
e5, e6 = HalfEdge(v2, v4), HalfEdge(v4, v2)
e5.Twin = e6
e6.Twin = e5
e5.Visit = visitvalue
e6.Visit = visitvalue
    # wire up the edges of newface1
e1.Suc = e5
e5.Suc = e4
e4.Suc = e1
e1.Pre = e4
e4.Pre = e5
e5.Pre = e1
    # wire up the edges of newface2
e2.Suc = e3
e3.Suc = e6
e6.Suc = e2
e2.Pre = e6
e6.Pre = e3
e3.Pre = e2
    # edges point to newface1
e1.Face = newface1
e4.Face = newface1
e5.Face = newface1
    # edges point to newface2
e2.Face = newface2
e3.Face = newface2
e6.Face = newface2
    # construct the two new buckets and keep buckets and faces linked
bucket1 = Bucket(newpoints1)
bucket2 = Bucket(newpoints2)
bucket1.Face = newface1
bucket2.Face = newface2
newface1.Bucket = bucket1
newface2.Bucket = bucket2
# Point vo splits (clips) face into three new faces, O(1)
def ClipFace(face, vo, remainedpoints):
visitvalue = face.Visit
hf1 = face.HalfEdge
hf2 = hf1.Suc
hf3 = hf2.Suc
    # split the face
clipface1 = Face(hf1)
clipface2 = Face(hf2)
clipface3 = Face(hf3)
clipface1.Visit = visitvalue
clipface2.Visit = visitvalue
clipface3.Visit = visitvalue
# face1
hf1_pre = HalfEdge(vo, hf1.Start)
hf1_suc = HalfEdge(hf1.End, vo)
hf1_pre.Visit = visitvalue
hf1_suc.Visit = visitvalue
hf1.Pre = hf1_pre
hf1.Suc = hf1_suc
hf1_pre.Pre = hf1_suc
hf1_pre.Suc = hf1
hf1_suc.Pre = hf1
hf1_suc.Suc = hf1_pre
hf1.Face = clipface1
hf1_pre.Face = clipface1
hf1_suc.Face = clipface1
# face2
hf2_pre = HalfEdge(vo, hf2.Start)
hf2_suc = HalfEdge(hf2.End, vo)
hf2_pre.Visit = visitvalue
hf2_suc.Visit = visitvalue
hf2.Pre = hf2_pre
hf2.Suc = hf2_suc
hf2_pre.Pre = hf2_suc
hf2_pre.Suc = hf2
hf2_suc.Pre = hf2
hf2_suc.Suc = hf2_pre
hf2.Face = clipface2
hf2_pre.Face = clipface2
hf2_suc.Face = clipface2
# face3
hf3_pre = HalfEdge(vo, hf3.Start)
hf3_suc = HalfEdge(hf3.End, vo)
hf3_pre.Visit = visitvalue
hf3_suc.Visit = visitvalue
hf3.Pre = hf3_pre
hf3.Suc = hf3_suc
hf3_pre.Pre = hf3_suc
hf3_pre.Suc = hf3
hf3_suc.Pre = hf3
hf3_suc.Suc = hf3_pre
hf3.Face = clipface3
hf3_pre.Face = clipface3
hf3_suc.Face = clipface3
vo.HalfEdge = hf1_pre
# twin
hf1_pre.Twin = hf3_suc
hf3_suc.Twin = hf1_pre
hf2_pre.Twin = hf1_suc
hf1_suc.Twin = hf2_pre
hf3_pre.Twin = hf2_suc
hf2_suc.Twin = hf3_pre
    ## distribute the points into the new buckets
    # vertices of the triangle that owns the bucket
point = vo.Point
p1 = hf1.Start.Point
p2 = hf2.Start.Point
p3 = hf3.Start.Point
    # split the bucket
clipbucketps1, clipbucketps2, clipbucketps3 = [], [], []
for eachpoint in remainedpoints:
if InTriangle(p1, p2, point, eachpoint):
clipbucketps1.append(eachpoint)
elif InTriangle(p2, p3, point, eachpoint):
clipbucketps2.append(eachpoint)
else:
clipbucketps3.append(eachpoint)
    # link the split faces with their buckets
clipbucket1 = Bucket(clipbucketps1)
clipbucket2 = Bucket(clipbucketps2)
clipbucket3 = Bucket(clipbucketps3)
clipface1.Bucket = clipbucket1
clipface2.Bucket = clipbucket2
clipface3.Bucket = clipbucket3
clipbucket1.Face = clipface1
clipbucket2.Face = clipface2
clipbucket3.Face = clipface3
return clipface1, clipface2, clipface3
# Traverse the net and collect its edges, O(n)
def VisitNet(face):
visitvalue = face.Visit
notvisitvalue = not visitvalue
faces = [face]
    # mark as visited
face.Visit = notvisitvalue
delaunaynet = []
while faces:
eachface = faces[-1]
faces.pop(-1)
        # the three edges of the face
e1 = eachface.HalfEdge
e2 = e1.Suc
e3 = e2.Suc
        ## push the three faces adjacent to the current face onto faces
eis = [e1, e2, e3]
for ei in eis:
            # twin of ei
eiTwin = ei.Twin
            # ei has not been visited yet
if ei.Visit == visitvalue:
ls, le = ei.Start.Point, ei.End.Point
if abs(ls[0]) != Infinity and abs(ls[1]) != Infinity and abs(le[0]) != Infinity and abs(le[1]) != Infinity:
delaunaynet.append([ls, le])
ei.Visit = notvisitvalue
if eiTwin:
faces.append(eiTwin.Face)
                    # mark as visited
eiTwin.Face.Visit = notvisitvalue
eiTwin.Visit = notvisitvalue
return delaunaynet
# Traverse the triangles of the net, O(n)
def VisitTriangles(face):
    # traverse the net
visitvalue = face.Visit
notvisitvalue = not visitvalue
faces = [face]
    # mark as visited
face.Visit = notvisitvalue
delaunaynet = VGroup()
while faces:
eachface = faces[-1]
faces.pop(-1)
        # the three edges of the face
e1 = eachface.HalfEdge
e2 = e1.Suc
e3 = e2.Suc
        # mark as visited
e1.Visit = notvisitvalue
e2.Visit = notvisitvalue
e3.Visit = notvisitvalue
        # the three vertices of the face
p1 = e1.Start.Point
p2 = e2.Start.Point
p3 = e3.Start.Point
delaunaynet.add(Polygon(p1, p2, p3))
ei = [e1, e2, e3]
for each in ei:
et = each.Twin
if et:
etf = et.Face
                # not visited yet
if etf.Visit == visitvalue:
                    # mark as visited
etf.Visit = notvisitvalue
faces.append(etf)
return delaunaynet
# Traverse the Voronoi diagram, O(n)
def VisitVoronoi(face):
visitvalue = face.Visit
notvisitvalue = not visitvalue
faces = [face]
    # mark as visited
face.Visit = notvisitvalue
voronoi = []
while faces:
eachface = faces[-1]
faces.pop(-1)
        # the three edges of the face
e1 = eachface.HalfEdge
e2 = e1.Suc
e3 = e2.Suc
        ## push the three faces adjacent to the current face onto faces
eis = [e1, e2, e3]
for ei in eis:
            # twin of ei
eiTwin = ei.Twin
            # ei has not been visited yet
if ei.Visit == visitvalue:
ei.Visit = notvisitvalue
if eiTwin:
ls, le = ei.Start.Point, ei.End.Point
if abs(ls[0]) != Infinity and abs(ls[1]) != Infinity and abs(le[0]) != Infinity and abs(le[1]) != Infinity:
efc, etfc = ei.Face.Center, eiTwin.Face.Center
ese = eiTwin.Suc.End.Point
                        # the vertex opposite this edge is a point at infinity
if abs(ese[0]) == Infinity or abs(ese[1]) == Infinity:
eis, eie = np.array(ei.Start.Point), np.array(ei.End.Point)
vertical = np.cross(eie - eis, np.array([0, 0, 1]))
vertical = normalize(vertical)
vertical = Infinity * vertical
newle = efc + vertical
voronoi.append([efc, newle])
else:
voronoi.append([efc, etfc])
faces.append(eiTwin.Face)
                    # mark as visited
eiTwin.Face.Visit = notvisitvalue
eiTwin.Visit = notvisitvalue
return voronoi
# Attach circumcircle centers to the faces of the net, O(n)
def InitNetCircumcircleCenter(face):
    # traverse the net
visitvalue = face.Visit
notvisitvalue = not visitvalue
faces = [face]
    # mark as visited
face.Visit = notvisitvalue
#delaunaynet = VGroup()
while faces:
eachface = faces[-1]
faces.pop(-1)
        # the three edges of the face
e1 = eachface.HalfEdge
e2 = e1.Suc
e3 = e2.Suc
        # mark as visited
e1.Visit = notvisitvalue
e2.Visit = notvisitvalue
e3.Visit = notvisitvalue
        # the three vertices of the face
p1 = e1.Start.Point
p2 = e2.Start.Point
p3 = e3.Start.Point
        # assign the circumcenter
if eachface.Center is None:
eachface.Center = CircumcircleCenter(p1, p2, p3)
#delaunaynet.add(Polygon(p1, p2, p3))
eis = [e1, e2, e3]
for ei in eis:
eit = ei.Twin
if eit:
eitf = eit.Face
                # not visited yet
if eitf.Visit == visitvalue:
                    # mark as visited
eitf.Visit = notvisitvalue
faces.append(eitf)
# Construct the Delaunay net, O(nlogn)
def ConstructNet(points=None):
face1 = InitInfNet(points)
infedge = face1.HalfEdge
buckets = [face1.Bucket]
while buckets:
        # take a bucket
bucket = buckets[-1]
buckets.pop(-1)
        # take a point from the bucket
point = bucket.Points[-1]
bucket.Points.pop(-1)
vo = Vertice(point)
        # edges of the triangle that owns the bucket
crpface = bucket.Face
hf1 = crpface.HalfEdge
hf2 = hf1.Suc
hf3 = hf2.Suc
        # split the face
ClipFace(crpface, vo, bucket.Points)
        # check whether edge flips are needed
edges = [hf1, hf2, hf3]
while edges:
eachedge = edges[-1]
edges.pop(-1)
eachedgetwin = eachedge.Twin
if eachedgetwin:
trip1 = vo.Point
trip2 = eachedgetwin.Start.Point
trip3 = eachedgetwin.End.Point
trip4 = eachedgetwin.Suc.End.Point
if InCircle(trip1, trip2, trip3, trip4):
etfb = eachedgetwin.Face.Bucket
if len(etfb.Points) > 0:
buckets.remove(etfb)
edges.append(eachedgetwin.Pre)
edges.append(eachedgetwin.Suc)
EdgeFlipping(eachedge)
        # walk all edges around the point and enqueue their buckets
ringvisit = vo.HalfEdge
currvisit = ringvisit.Twin.Suc
while currvisit != ringvisit:
currbucket = currvisit.Face.Bucket
if len(currbucket.Points) > 0:
buckets.append(currbucket)
currvisit = currvisit.Twin.Suc
currbucket = currvisit.Face.Bucket
if len(currbucket.Points) > 0:
buckets.append(currbucket)
return infedge.Face
# Find the face of the net that contains a given point
def get_point_posface(point, net):
    # traverse the net
visitvalue = net.Visit
notvisitvalue = not visitvalue
faces = [net]
    # mark as visited
net.Visit = notvisitvalue
    # location (the face containing the point)
#posface = None
mark = True
while faces:
eachface = faces[-1]
faces.pop(-1)
        # the three edges of the face
e1 = eachface.HalfEdge
e2 = e1.Suc
e3 = e2.Suc
        # mark as visited
e1.Visit = notvisitvalue
e2.Visit = notvisitvalue
e3.Visit = notvisitvalue
        # the three vertices of the face
p1 = e1.Start.Point
p2 = e2.Start.Point
p3 = e3.Start.Point
        # location not found yet
if mark:
if InTriangle(p1, p2, p3, point):
posface = eachface
ei = [e1, e2, e3]
for each in ei:
et = each.Twin
if et:
etf = et.Face
                # not visited yet
if etf.Visit == visitvalue:
                    # mark as visited
etf.Visit = notvisitvalue
faces.append(etf)
return posface
# Insert a point into the net, O(n)
def net_insert_point(point, net):
    # the face containing the point
posface = get_point_posface(point, net)
posface.Bucket.Points.append(point)
infedge = posface.HalfEdge
buckets = [posface.Bucket]
while buckets:
        # take a bucket
bucket = buckets[-1]
buckets.pop(-1)
        # take a point from the bucket
point = bucket.Points[-1]
bucket.Points.pop(-1)
vo = Vertice(point)
        # edges of the triangle that owns the bucket
crpface = bucket.Face
hf1 = crpface.HalfEdge
hf2 = hf1.Suc
hf3 = hf2.Suc
        # split the face
ClipFace(crpface, vo, bucket.Points)
        # check whether edge flips are needed
edges = [hf1, hf2, hf3]
while edges:
eachedge = edges[-1]
edges.pop(-1)
eachedgetwin = eachedge.Twin
if eachedgetwin:
trip1 = vo.Point
trip2 = eachedgetwin.Start.Point
trip3 = eachedgetwin.End.Point
trip4 = eachedgetwin.Suc.End.Point
if InCircle(trip1, trip2, trip3, trip4):
etfb = eachedgetwin.Face.Bucket
if len(etfb.Points) > 0:
buckets.remove(etfb)
edges.append(eachedgetwin.Pre)
edges.append(eachedgetwin.Suc)
EdgeFlipping(eachedge)
        # walk all edges around the point and enqueue their buckets
ringvisit = vo.HalfEdge
currvisit = ringvisit.Twin.Suc
while currvisit != ringvisit:
currbucket = currvisit.Face.Bucket
if len(currbucket.Points) > 0:
buckets.append(currbucket)
currvisit = currvisit.Twin.Suc
currbucket = currvisit.Face.Bucket
if len(currbucket.Points) > 0:
buckets.append(currbucket)
return infedge.Face
# Insert a point into the net and set the circumcenters, O(n)
def net_insert_point_and_set_circumcirclecenter(point, net):
    # the face containing the point, O(n)
posface = get_point_posface(point, net)
vo = Vertice(point)
    # edges of the triangle that owns the bucket
crpface = posface
hf1 = crpface.HalfEdge
hf2 = hf1.Suc
hf3 = hf2.Suc
    # split the face
ClipFace(crpface, vo, [])
    # set the circumcenters
hf1.Face.Center = CircumcircleCenter(hf1.Start.Point, hf1.End.Point, point)
hf2.Face.Center = CircumcircleCenter(hf2.Start.Point, hf2.End.Point, point)
hf3.Face.Center = CircumcircleCenter(hf3.Start.Point, hf3.End.Point, point)
    # check whether edge flips are needed, O(6)
edges = [hf1, hf2, hf3]
while edges:
eachedge = edges[-1]
edges.pop(-1)
eachedgetwin = eachedge.Twin
if eachedgetwin:
trip1 = vo.Point
trip2 = eachedgetwin.Start.Point
trip3 = eachedgetwin.End.Point
trip4 = eachedgetwin.Suc.End.Point
if InCircle(trip1, trip2, trip3, trip4):
edges.append(eachedgetwin.Pre)
edges.append(eachedgetwin.Suc)
efv1 = eachedge.Suc
efv2 = eachedgetwin.Suc
EdgeFlipping(eachedge)
efv1.Face.Center = CircumcircleCenter(trip1, trip2, trip4)
efv2.Face.Center = CircumcircleCenter(trip1, trip3, trip4)
return vo.HalfEdge.Face
# Delaunay triangulation net, O(nlogn)
class DelaunayTrianglation(VGroup):
def __init__(self, *points, **kwargs):
digest_config(self, kwargs)
self.net = ConstructNet(list(points))
self.kwargs = kwargs
VGroup.__init__(self, *[Line(*each, **kwargs) for each in self.VisitNet()])
    # Get the vertex pairs of the net, i.e. the edges expressed as coordinates
def VisitNet(self):
return VisitNet(self.net)
def VisitTriangles(self):
return VGroup(*VisitTriangles(self.net), **self.kwargs)
    # Get the underlying net
def GetNet(self):
return self.net
    # Insert a point
def InsertPoint(self, point):
net_insert_point(point, self.net)
self.become(VGroup(*[Line(*each, **self.kwargs) for each in self.VisitNet()]))
return self
# Voronoi diagram, O(n)+O(nlogn)=O(nlogn)
class Voronoi(VGroup):
def __init__(self, *points, **kwargs):
digest_config(self, kwargs)
self.kwargs = kwargs
self.net = DelaunayTrianglation(*points).GetNet()
InitNetCircumcircleCenter(self.net)
self.voronoi = self.VisitVoronoi()
VGroup.__init__(self, *[Line(*each, **kwargs) for each in self.voronoi])
def VisitVoronoi(self):
return VisitVoronoi(self.net)
    # Get the underlying net
def GetNet(self):
return self.net
    # Insert a point
def InsertPoint(self, point):
net_insert_point_and_set_circumcirclecenter(point, self.net)
self.voronoi = self.VisitVoronoi()
self.become(VGroup(*[Line(*each, **self.kwargs) for each in self.voronoi]))
return self
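    # Incremental-use sketch (added, with hypothetical variable names):
    #   vor = Voronoi(*points)
    #   vor.InsertPoint(np.array([0.5, 0.5, 0]))  # updates the diagram without rebuilding from scratch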
# Test scene
class test(Scene):
def construct(self):
np.random.seed(2007)
points = [
[np.random.randint(-70000, 70000)/10500, np.random.randint(-38000, 38000)/10500, 0] for i in range(800)
]
#points = [UL, UP, UR, LEFT, ORIGIN, RIGHT, DL, DOWN, DR]
#points = [UL, DR, UR, DL]
dots = [Dot(p).scale(0.5) for p in points]
self.add(*dots)
start = time.perf_counter()
net = Voronoi(*points)
self.add(net)
end = time.perf_counter()
print(end - start)
'''
p1, p2, p3 = DL, UL, UR
p4 = DR
p5 = ORIGIN
p6 = UL/2
p7 = UL
p8 = UL*2
print(InTriangle(p1, p2, p3, p4))
print(InTriangle(p1, p2, p3, p5))
print(InTriangle(p1, p2, p3, p6))
print(InTriangle(p1, p2, p3, p7))
print(InTriangle(p1, p2, p3, p8))
print(InCircle(p1, p2, p3, p4))
print(InCircle(p1, p2, p3, p5))
print(InCircle(p1, p2, p3, p6))
print(InCircle(p1, p2, p3, p7))
print(InCircle(p1, p2, p3, p8))
'''
'''
infnet = InitInfNet()
he1 = infnet.HalfEdge
he2 = he1.Suc
he3 = he2.Suc
print(get_polygon_directed_area(Polygon(he1.Start.Point, he2.Start.Point, he3.Start.Point)))
'''
'''
np.random.seed(2007)
points = [
[np.random.randint(-70000, 70000)/10500, np.random.randint(-38000, 38000)/10500, 0] for i in range(1000)
]
#points = [UL, UP, UR, LEFT, ORIGIN, RIGHT, DL, DOWN, DR]
#points = [UL, DR, UR, DL]
dots = [Dot(p) for p in points]
#self.add(*dots)
start = time.perf_counter()
delaunay = ConstructNet(self, points)
net = VisitNet(delaunay)
end = time.perf_counter()
print(end - start)
self.add(net)
'''
'''
np.random.seed(2000007)
points = [
[np.random.randint(-70000, 70000)/10000, np.random.randint(-38000, 38000)/10000, 0] for i in range(7)
]
dots = [Dot(p) for p in points]
self.add(*dots)
start = time.perf_counter()
delaunay = InitInfNet(points)
#print(points[0])
net1, net2, net3 = ClipFace(delaunay, Vertice(points[0]), points[1:])
net = VisitTriangles(net1)
end = time.perf_counter()
print(end - start)
self.add(net)
'''
'''
p1, p2, p3, p4 = UL, UR*2, DR, DL*2
v1, v2, v3, v4 = Vertice(p1), Vertice(p2), Vertice(p3), Vertice(p4)
he1 = HalfEdge(v1, v2)
he2 = HalfEdge(v2, v3)
he3 = HalfEdge(v3, v4)
he4 = HalfEdge(v4, v1)
he5 = HalfEdge(v3, v1)
he6 = HalfEdge(v1, v3)
he1.Suc = he2
he2.Pre = he1
he2.Suc = he5
he5.Pre = he2
he5.Suc = he1
he1.Pre = he5
he3.Suc = he4
he4.Pre = he3
he4.Suc = he6
he6.Pre = he4
he6.Suc = he3
he3.Pre = he6
bucket1 = Bucket([UR+RIGHT/5, UR+LEFT/5])
bucket2 = Bucket([])
face1 = Face(he1)
face1.Bucket = bucket1
bucket1.Face = face1
he1.Face = face1
he2.Face = face1
he5.Face = face1
face2 = Face(he3)
face2.Bucket = bucket2
bucket2.Face = face2
he3.Face = face2
he4.Face = face2
he6.Face = face2
he5.Twin = he6
he6.Twin = he5
EdgeFlipping(he5)
start = time.perf_counter()
net = VisitInfNet(face1)
end = time.perf_counter()
print(end - start)
print(get_polygon_directed_area(Polygon(face1.HalfEdge.Start.Point, face1.HalfEdge.Suc.Start.Point,
face1.HalfEdge.Suc.Suc.Start.Point)))
print(get_polygon_directed_area(Polygon(face2.HalfEdge.Start.Point, face2.HalfEdge.Suc.Start.Point,
face2.HalfEdge.Suc.Suc.Start.Point)))
self.add(net)
'''
#p1, p2, p3, p4 = UL, UR, DR, DL
#print(InTriangle(p1, p2, p3, ORIGIN), InTriangle(p1, p2, p3, UR/2), InTriangle(p1, p2, p3, p4))
'''
start = time.perf_counter()
print(
InCircle(p1, p2, p3, p4),
InCircle(p1, p2, p3, ORIGIN),
InCircle(p1, p2, p3, p4+LEFT)
)
end = time.perf_counter()
print(end - start)
start = time.perf_counter()
print(
InCircle2(p1, p2, p3, p4),
InCircle2(p1, p2, p3, ORIGIN),
InCircle2(p1, p2, p3, p4+LEFT)
)
end = time.perf_counter()
print(end - start)
'''
self.wait()
|
[
"manimlib.utils.space_ops.normalize",
"numpy.random.seed",
"manimlib.utils.config_ops.digest_config",
"manimlib.mobject.types.vectorized_mobject.VGroup",
"manimlib.mobject.geometry.Polygon",
"numpy.linalg.det",
"numpy.array",
"numpy.exp",
"numpy.random.randint",
"numpy.dot",
"manimlib.mobject.geometry.Line",
"manimlib.mobject.geometry.Dot"
] |
[((2436, 2543), 'numpy.linalg.det', 'np.linalg.det', (['[[p[0], p[1], a13, 1], [q[0], q[1], a23, 1], [b[0], b[1], a33, 1], [d[0], d\n [1], a43, 1]]'], {}), '([[p[0], p[1], a13, 1], [q[0], q[1], a23, 1], [b[0], b[1], a33,\n 1], [d[0], d[1], a43, 1]])\n', (2449, 2543), True, 'import numpy as np\n'), ((3336, 3402), 'numpy.linalg.det', 'np.linalg.det', (['[[1, p[0], p[1]], [1, q[0], q[1]], [1, b[0], b[1]]]'], {}), '([[1, p[0], p[1]], [1, q[0], q[1]], [1, b[0], b[1]]])\n', (3349, 3402), True, 'import numpy as np\n'), ((3513, 3573), 'numpy.linalg.det', 'np.linalg.det', (['[[1, a1, p[1]], [1, a2, q[1]], [1, a3, b[1]]]'], {}), '([[1, a1, p[1]], [1, a2, q[1]], [1, a3, b[1]]])\n', (3526, 3573), True, 'import numpy as np\n'), ((3621, 3681), 'numpy.linalg.det', 'np.linalg.det', (['[[1, p[0], a1], [1, q[0], a2], [1, b[0], a3]]'], {}), '([[1, p[0], a1], [1, q[0], a2], [1, b[0], a3]])\n', (3634, 3681), True, 'import numpy as np\n'), ((11636, 11644), 'manimlib.mobject.types.vectorized_mobject.VGroup', 'VGroup', ([], {}), '()\n', (11642, 11644), False, 'from manimlib.mobject.types.vectorized_mobject import VGroup\n'), ((697, 706), 'numpy.exp', 'np.exp', (['(1)'], {}), '(1)\n', (703, 706), True, 'import numpy as np\n'), ((833, 844), 'numpy.array', 'np.array', (['p'], {}), '(p)\n', (841, 844), True, 'import numpy as np\n'), ((846, 857), 'numpy.array', 'np.array', (['q'], {}), '(q)\n', (854, 857), True, 'import numpy as np\n'), ((892, 912), 'numpy.dot', 'np.dot', (['(q - p)', '(q - p)'], {}), '(q - p, q - p)\n', (898, 912), True, 'import numpy as np\n'), ((3729, 3768), 'numpy.array', 'np.array', (['[det2 / det1, det3 / det1, 0]'], {}), '([det2 / det1, det3 / det1, 0])\n', (3737, 3768), True, 'import numpy as np\n'), ((4786, 4812), 'numpy.array', 'np.array', (['[Infinity, 0, 0]'], {}), '([Infinity, 0, 0])\n', (4794, 4812), True, 'import numpy as np\n'), ((4835, 4861), 'numpy.array', 'np.array', (['[0, Infinity, 0]'], {}), '([0, Infinity, 0])\n', (4843, 4861), True, 'import numpy as np\n'), ((4884, 4919), 'numpy.array', 'np.array', (['[-Infinity, -Infinity, 0]'], {}), '([-Infinity, -Infinity, 0])\n', (4892, 4919), True, 'import numpy as np\n'), ((21418, 21445), 'manimlib.utils.config_ops.digest_config', 'digest_config', (['self', 'kwargs'], {}), '(self, kwargs)\n', (21431, 21445), False, 'from manimlib.utils.config_ops import digest_config\n'), ((22163, 22190), 'manimlib.utils.config_ops.digest_config', 'digest_config', (['self', 'kwargs'], {}), '(self, kwargs)\n', (22176, 22190), False, 'from manimlib.utils.config_ops import digest_config\n'), ((22917, 22937), 'numpy.random.seed', 'np.random.seed', (['(2007)'], {}), '(2007)\n', (22931, 22937), True, 'import numpy as np\n'), ((12059, 12078), 'manimlib.mobject.geometry.Polygon', 'Polygon', (['p1', 'p2', 'p3'], {}), '(p1, p2, p3)\n', (12066, 12078), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((21556, 21577), 'manimlib.mobject.geometry.Line', 'Line', (['*each'], {}), '(*each, **kwargs)\n', (21560, 21577), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((22402, 22423), 'manimlib.mobject.geometry.Line', 'Line', (['*each'], {}), '(*each, **kwargs)\n', (22406, 22423), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((22972, 23004), 'numpy.random.randint', 'np.random.randint', (['(-70000)', '(70000)'], {}), '(-70000, 70000)\n', (22989, 23004), True, 'import numpy as np\n'), ((23012, 23044), 'numpy.random.randint', 'np.random.randint', (['(-38000)', '(38000)'], {}), '(-38000, 38000)\n', (23029, 
23044), True, 'import numpy as np\n'), ((23206, 23212), 'manimlib.mobject.geometry.Dot', 'Dot', (['p'], {}), '(p)\n', (23209, 23212), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((21975, 22001), 'manimlib.mobject.geometry.Line', 'Line', (['*each'], {}), '(*each, **self.kwargs)\n', (21979, 22001), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((22775, 22801), 'manimlib.mobject.geometry.Line', 'Line', (['*each'], {}), '(*each, **self.kwargs)\n', (22779, 22801), False, 'from manimlib.mobject.geometry import Dot, Line, Polygon\n'), ((13684, 13703), 'manimlib.utils.space_ops.normalize', 'normalize', (['vertical'], {}), '(vertical)\n', (13693, 13703), False, 'from manimlib.utils.space_ops import normalize\n'), ((13514, 13538), 'numpy.array', 'np.array', (['ei.Start.Point'], {}), '(ei.Start.Point)\n', (13522, 13538), True, 'import numpy as np\n'), ((13540, 13562), 'numpy.array', 'np.array', (['ei.End.Point'], {}), '(ei.End.Point)\n', (13548, 13562), True, 'import numpy as np\n'), ((13623, 13642), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (13631, 13642), True, 'import numpy as np\n')]
|
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
"""
Test suite for the pdetools module
The tests must be linked with a Domain class object in the setUp method:
from esys.dudley import Rectangle
class Test_LinearPDEOnDudley(Test_LinearPDE):
RES_TOL=1.e-8
def setUp(self):
self.domain = Rectangle(10,10,2)
def tearDown(self):
del self.domain
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(Test_LinearPDEOnDudley))
unittest.TextTestRunner(verbosity=2).run(suite)
:var __author__: name of author
:var __copyright__: copyrights
:var __license__: licence agreement
:var __url__: url entry point on documentation
:var __version__: version
:var __date__: date of the version
"""
__author__="<NAME>, <EMAIL>"
import esys.escriptcore.utestselect as unittest
import numpy
from esys.escript import *
from esys.escript.pdetools import Locator,Projector,TimeIntegrationManager,NoPDE,PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem
from esys.escript.pdetools import Defect, NewtonGMRES
from numpy.linalg import solve as solve_linear_equations
class Test_pdetools_noLumping(unittest.TestCase):
DEBUG=False
VERBOSE=False
def test_TimeIntegrationManager_scalar(self):
t=0.
dt=0.1
tm=TimeIntegrationManager(0.,p=1)
while t<1.:
t+=dt
tm.checkin(dt,t)
v_guess=tm.extrapolate(dt)
self.assertLess(abs(v_guess-(tm.getTime()+dt)), self.RES_TOL, "extrapolation is wrong")
def test_TimeIntegrationManager_vector(self):
t=0.
dt=0.3
tm=TimeIntegrationManager(0.,0.,p=1)
while t<1.:
t+=dt
tm.checkin(dt,t,3*t)
v_guess=tm.extrapolate(dt)
e=max(abs(v_guess[0]-(tm.getTime()+dt)),abs(v_guess[1]-(tm.getTime()+dt)*3.))
self.assertLess(e, self.RES_TOL)
def test_Locator(self):
x=self.domain.getX()
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space from domain")
l=Locator(ContinuousFunction(self.domain),numpy.ones((self.domain.getDim(),)))
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l.getX()
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-numpy.ones((self.domain.getDim(),)))<self.RES_TOL,"location wrong")
xx=l(x)
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-numpy.ones((self.domain.getDim(),)))<self.RES_TOL,"value wrong vector")
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,float),"wrong scalar type")
self.assertTrue(abs(xx-2.)<self.RES_TOL,"value wrong scalar")
# now with interpolation:
l=Locator(Function(self.domain),numpy.ones((self.domain.getDim(),)))
x2=Function(self.domain).getX()
xx=l(x)
self.assertTrue(isinstance(xx,numpy.ndarray),"wrong vector type")
self.assertTrue(Lsup(xx-l(x2))<self.RES_TOL,"location wrong")
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,float),"wrong scalar type")
self.assertLess(abs(xx-l(x2[0])-l(x2[1])), self.RES_TOL)
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
d=Data(0, ContinuousFunction(self.domain))
l.setValue(d, 7)
self.assertTrue(sup(d)>6, "value not set") # guarantees we have set something
self.assertLess(Lsup(l.getValue(d)-7), self.RES_TOL, "value not set in the correct place")
def test_Locator_withList(self):
x=self.domain.getX()
arg=[numpy.ones((self.domain.getDim(),)), numpy.zeros((self.domain.getDim(),))]
l=Locator(self.domain,arg)
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space from domain")
l=Locator(ContinuousFunction(self.domain),arg)
self.assertTrue(ContinuousFunction(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l.getX()
self.assertTrue(isinstance(xx,list),"list expected")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item"%i)
self.assertTrue(Lsup(xx[i]-arg[i])<self.RES_TOL,"%s-th location is wrong"%i)
xx=l(x)
self.assertTrue(isinstance(xx,list),"list expected (2)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item (2)"%i)
self.assertTrue(Lsup(xx[i]-arg[i])<self.RES_TOL,"%s-th location is wrong (2)"%i)
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,list),"list expected (3)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],float),"wrong scalar type")
self.assertTrue(abs(xx[i]-(arg[i][0]+arg[i][1]))<self.RES_TOL,"value wrong scalar")
# now with interpolation:
l=Locator(Function(self.domain),arg)
self.assertTrue(Function(self.domain)==l.getFunctionSpace(),"wrong function space")
xx=l(x)
x2=Function(self.domain).getX()
self.assertTrue(isinstance(xx,list),"list expected (2)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],numpy.ndarray),"vector expected for %s item (2)"%i)
self.assertTrue(Lsup(xx[i]-l(x2)[i])<self.RES_TOL,"%s-th location is wrong (2)"%i)
xx=l(x[0]+x[1])
self.assertTrue(isinstance(xx,list),"list expected (3)")
for i in range(len(xx)):
self.assertTrue(isinstance(xx[i],float),"wrong scalar type")
self.assertLess(abs(xx[i]-(l(x2[0])[i]+l(x2[1])[i])), self.RES_TOL)
l=Locator(self.domain,numpy.ones((self.domain.getDim(),)))
d=Data(0, ContinuousFunction(self.domain))
l.setValue(d, 7)
self.assertTrue(sup(d)>6, "value not set") # guarantees we have set something
self.assertLess(Lsup(l.getValue(d)-7), self.RES_TOL, "value not set in the correct place")
def testProjector_rank0(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_reduced(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=x
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=False,fast=False)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank0_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=1.
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank1_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([1.,2.,3.])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank2_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[11.,12.],[21,22.]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank3_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
def testProjector_rank4_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
p=Projector(self.domain,reduce=True,fast=False)
td_ref=numpy.array([[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*self.RES_TOL)
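# NoPDE tests: the expected solutions u_ex encode the point-wise rule u = Y/D where the
# constraint mask q is zero and u = r where q is set; a missing Y or r is taken as zero
# in the corresponding expected value.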
def test_NoPDE_scalar_missing_r(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,Y=1.,q=msk)
u=p.getSolution()
u_ex=(1.-msk)
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_missing_Y(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,q=msk,r=2.)
u=p.getSolution()
u_ex=msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_constant(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=1.,Y=1.,q=msk,r=2.)
u=p.getSolution()
u_ex=(1.-msk)+msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_scalar_variable(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])
p.setValue(D=10,Y=2*10,q=msk,r=2.)
u=p.getSolution()
u_ex=2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_missing_Y(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),q=msk,r=2.)
u=p.getSolution()
u_ex=msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_missing_r(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),Y=numpy.ones([2]),q=msk)
u=p.getSolution()
u_ex=(1.-msk)
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_constant(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=numpy.ones([2]),Y=numpy.ones([2]),q=msk,r=2.)
u=p.getSolution()
u_ex=(1.-msk)+msk*2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
def test_NoPDE_vector_variable(self):
p=NoPDE(self.domain)
x=self.domain.getX()
msk=whereZero(x[0])*[1.,0.]
p.setValue(D=x[:2]+1,Y=2*(x[:2]+1),q=msk,r=2.)
u=p.getSolution()
u_ex=2.
self.assertLess(Lsup(u_ex-u), Lsup(u_ex)*self.RES_TOL)
#=====
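# The solver tests below share one dense, symmetric 10x10 matrix A, a reference solution
# x_ref and the matching right hand side b; preconditioning, where used, is the simple
# Jacobi (diagonal) scaling out[i] = b[i]/A[i,i]. A solver passes if it recovers x_ref to
# within 10*rtol relative to Lsup(x_ref).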
def testPCG(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def Ms(b):
out=zeros((b.size,),float64)
for i in range(size(b)):
out[i]=b[i]/A[i,i]
return out
tol=1.e-4
x,r,a_norm=PCG(b*1.,Ap,x_ref*0.,Ms,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
self.assertLess(Lsup(r-(b-dot(A,x))), Lsup(b)*EPSILON*100.)
def testMINRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def Ms(b):
out=zeros((size(b),),float64)
for i in range(size(b)):
out[i]=b[i]/A[i,i]
return out
tol=1.e-4
x=MINRES(b*1.,Ap,x_ref*0,Ms,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testTFQMR(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
out=dot(A,x)
for i in range(size(x)):
out[i]/=A[i,i]
return out
tol=1.e-5
for i in range(size(b)): b[i]/=A[i,i]
x=TFQMR(b,Ap,x_ref*0,dot, atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testGMRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
b=dot(A,x)
for i in range(size(b)):
b[i]/=A[i,i]
return b
tol=1.e-4
for i in range(size(b)): b[i]/=A[i,i]
x=GMRES(b,Ap,x_ref*0,dot,atol=0, rtol=tol, iter_max=12)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
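# testGMRES_P_R repeats the GMRES test but supplies the Jacobi diagonal through the P_R
# argument (presumably applied as a right preconditioner) instead of pre-scaling b.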
def testGMRES_P_R(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def Ap(x):
return dot(A,x)
def P_Rp(x):
out=zeros(size(x), float64)
for i in range(size(x)):
out[i]=x[i]/A[i,i]
return out
tol=1.e-4
x=GMRES(b,Ap,x_ref*0,dot,atol=0, rtol=tol, iter_max=12,P_R=P_Rp)
self.assertLess(Lsup(x-x_ref), Lsup(x_ref)*tol*10.)
def testNewtonGMRES(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
class LL(Defect):
def __init__(self,*kwargs):
super(LL, self).__init__(*kwargs)
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.41794207085296, 0.031441086046563, 0.882801683420401,
0.807186823427233, 0.48950999450145, 0.995486532098031,
0.351243009576568, 0.704352576819321, 0.850648989740204,
0.314596738052894])
self.b=array([ 182.911023960262952, -1.048322041992754, 44.181293875206201,
30.344553414038817, 15.247917439094513, 24.060664905403492,
27.210293789825833, 47.122067744075842, 199.267136417856847,
-8.7934289814322 ])
def eval(self,x):
out=dot(self.A,x)-self.b
for i in range(size(self.b)):
out[i]/=self.A[i,i]
return out
def bilinearform(self,x0,x1):
return dot(x0,x1)
tol=1.e-8
ll=LL()
x=NewtonGMRES(LL(),ll.x_ref*0., iter_max=100, sub_iter_max=20, atol=0,rtol=tol, verbose=self.VERBOSE)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
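# The HomogeneousSaddlePointProblem tests solve a coupled (v, p) system built from the
# dense blocks A and Bt, once with usePCG=True and once with usePCG=False (GMRES);
# Sinv appears to hold an approximate inverse Schur complement whose diagonal is used as
# the pressure preconditioner in solve_prec.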
def testHomogeneousSaddlePointProblem_PCG(self):
from numpy import array, dot, zeros, size, float64
from math import sqrt
class LL(HomogeneousSaddlePointProblem):
def initialize(self):
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.100225501676291, -0.308862704993209, 0.064097238997721,
0.253012436539738, -0.346223308561905, 0.2425508275422,
-0.194695862196008, 0.09451439391473, 0.302961126826511,
-0.236043777597633] )
self.Bt=array([[ 0.01627853113636 ,0.06688235764255 , 0.004870689484614],
[ 0.062879587145773 ,0.038798770300146, 0.022155850155616],
[ 0.09312121957248 ,0.110244632756116, 0.14053347386784 ],
[ 0.059000597728388 ,0.090986953740106, 0.035316011834982],
[ 0.091209362659698 ,0.13205572801294 , 0.069462874306956],
[ 0.077790176986096 ,0.133626423045765, 0.011149969846981],
[ 0.01407283482513 ,0.094910926488907, 0.133498532648644],
[ 0.025728916673085 ,0.102542818811672, 0.13657268163218 ],
[ 0.071254288170748 ,0.071738715618163, 0.078005951991733],
[ 0.049463014576779 ,0.103559223780991, 0.003356415647637]])
self.p_ref = array([ 2.580984952252628 ,4.054090902056985, 0.935138168128546])
self.b=array([ 123.322775367582238, -51.556206655564573 , 16.220697868056913,
6.512480714694167 , -5.727371407390975 , 4.802494840775022,
-4.171606044721161 , -1.862366353566293 ,74.850226163257105,
-118.602464657076439])
self.Sinv=array([[ 9313.705360982807179,-5755.536981691270739, 806.289245589733696],
[-5755.536981691271649, 4606.321002756208145,-1630.50619635660928 ],
[ 806.289245589733468,-1630.506196356609053, 2145.65035816388945 ]])
def inner_pBv(self,p,Bv):
return dot(p,Bv)
def Bv(self,v, tol):
return dot(transpose(self.Bt),v)
def inner_p(self,p0,p1):
return dot(p0,p1)
def norm_v(self,v):
return sqrt(dot(v,v))
def getDV(self,p,v, tol):
dv=solve_linear_equations(self.A, self.b-dot(self.Bt,p)-dot(self.A,v))
return dv*(1+tol)
def norm_Bv(self,Bv):
return sqrt(dot(Bv,Bv))
def solve_AinvBt(self,p, tol):
out=solve_linear_equations(self.A, dot(self.Bt,p))
return out*(1.+tol)
def solve_prec(self,Bv, tol):
out=Bv*1.
for i in range(size(out)): out[i]*=self.Sinv[i,i]
return out*(1-tol)
tol=1.e-8
ll=LL()
ll.initialize()
ll.setTolerance(tol)
# ll.setSubToleranceReductionFactor(0.1)
x,p=ll.solve(ll.x_ref*1.20,ll.p_ref*(-2),max_iter=20, verbose=False, usePCG=True, iter_restart=20,max_correction_steps=10)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
self.assertLess(Lsup(p-ll.p_ref), Lsup(ll.p_ref)*tol*10.)
def testHomogeneousSaddlePointProblem_GMRES(self):
from numpy import array, prod, dot, zeros, size, float64
from math import sqrt
class LL(HomogeneousSaddlePointProblem):
def initialize(self):
self.A=array([[ 4.752141253159452e+02, -2.391895572674098e-01,
5.834798554135237e-01, -3.704394311709722e+00,
5.765369186984777e+00, -1.309786358737351e+01,
2.522087134507148e+01, -3.393956279045637e+01,
1.046856914770830e+02, -2.447764190849540e+02],
[ -2.391895572674098e-01, 1.256797283910693e+02,
-9.188270412920813e-01, 1.300169538880688e+00,
-5.353714719231424e-01, 2.674709444667012e+00,
-1.116097841269580e+01, 2.801193427514478e+01,
-3.877806125898224e+01, 3.063505753648256e+01],
[ 5.834798554135237e-01, -9.188270412920813e-01,
6.240841811806843e+01, -8.176289504109282e-01,
1.447935098417076e-01, -9.721424148655324e-01,
6.713551574117577e-01, -3.656297654168375e+00,
7.015141656913973e+00, -4.195525932156250e+01],
[ -3.704394311709722e+00, 1.300169538880688e+00,
-8.176289504109282e-01, 3.604980536782198e+01,
-6.241238423759328e-01, 1.142345320047869e+00,
-3.438816797096519e+00, 5.854857481367470e+00,
-4.524311288596452e+00, 1.136590280389803e+01],
[ 5.765369186984777e+00, -5.353714719231424e-01,
1.447935098417076e-01, -6.241238423759328e-01,
2.953997190215862e+01, -9.474729233464712e-01,
1.883516378345809e+00, -1.906274765704230e+00,
4.401859671778645e+00, -1.064573816075257e+01],
[ -1.309786358737351e+01, 2.674709444667012e+00,
-9.721424148655324e-01, 1.142345320047869e+00,
-9.474729233464712e-01, 2.876998216302979e+01,
-4.853065259692995e-01, 7.088596468102618e-01,
-8.972224295152829e-01, 5.228606946522749e+00],
[ 2.522087134507148e+01, -1.116097841269580e+01,
6.713551574117577e-01, -3.438816797096519e+00,
1.883516378345809e+00, -4.853065259692995e-01,
5.121175860935919e+01, -3.523133115905478e-01,
1.782136702229135e+00, -1.560849559916187e+00],
[ -3.393956279045637e+01, 2.801193427514478e+01,
-3.656297654168375e+00, 5.854857481367470e+00,
-1.906274765704230e+00, 7.088596468102618e-01,
-3.523133115905478e-01, 8.411681423853814e+01,
-5.238590858177903e-01, 1.515872114883926e+00],
[ 1.046856914770830e+02, -3.877806125898224e+01,
7.015141656913973e+00, -4.524311288596452e+00,
4.401859671778645e+00, -8.972224295152829e-01,
1.782136702229135e+00, -5.238590858177903e-01,
1.797889693808014e+02, -8.362340479938084e-01],
[ -2.447764190849540e+02, 3.063505753648256e+01,
-4.195525932156250e+01, 1.136590280389803e+01,
-1.064573816075257e+01, 5.228606946522749e+00,
-1.560849559916187e+00, 1.515872114883926e+00,
-8.362340479938084e-01, 3.833719335346630e+02]])
self.x_ref=array([ 0.100225501676291, -0.308862704993209, 0.064097238997721,
0.253012436539738, -0.346223308561905, 0.2425508275422,
-0.194695862196008, 0.09451439391473, 0.302961126826511,
-0.236043777597633] )
self.Bt=array([[ 0.01627853113636 ,0.06688235764255 , 0.004870689484614],
[ 0.062879587145773 ,0.038798770300146, 0.022155850155616],
[ 0.09312121957248 ,0.110244632756116, 0.14053347386784 ],
[ 0.059000597728388 ,0.090986953740106, 0.035316011834982],
[ 0.091209362659698 ,0.13205572801294 , 0.069462874306956],
[ 0.077790176986096 ,0.133626423045765, 0.011149969846981],
[ 0.01407283482513 ,0.094910926488907, 0.133498532648644],
[ 0.025728916673085 ,0.102542818811672, 0.13657268163218 ],
[ 0.071254288170748 ,0.071738715618163, 0.078005951991733],
[ 0.049463014576779 ,0.103559223780991, 0.003356415647637]])
self.p_ref = array([ 2.580984952252628 ,4.054090902056985, 0.935138168128546])
self.b=array([ 123.322775367582238, -51.556206655564573 , 16.220697868056913,
6.512480714694167 , -5.727371407390975 , 4.802494840775022,
-4.171606044721161 , -1.862366353566293 ,74.850226163257105,
-118.602464657076439])
self.Sinv=array([[ 9313.705360982807179,-5755.536981691270739, 806.289245589733696],
[-5755.536981691271649, 4606.321002756208145,-1630.50619635660928 ],
[ 806.289245589733468,-1630.506196356609053, 2145.65035816388945 ]])
def inner_pBv(self,p,Bv):
return dot(p,Bv)
def Bv(self,v, tol):
return dot(transpose(self.Bt),v)
def inner_p(self,p0,p1):
return dot(p0,p1)
def norm_v(self,v):
return sqrt(dot(v,v))
def getDV(self,p,v, tol):
dv=solve_linear_equations(self.A, self.b-dot(self.Bt,p)-dot(self.A,v))
return dv*(1+tol)
def norm_Bv(self,Bv):
return sqrt(dot(Bv,Bv))
def solve_AinvBt(self,p, tol):
out=solve_linear_equations(self.A, dot(self.Bt,p))
return out*(1.+tol)
def solve_prec(self,Bv, tol):
out=Bv*1.
for i in range(size(out)): out[i]*=self.Sinv[i,i]
return out*(1-tol)
tol=1.e-8
ll=LL()
ll.initialize()
ll.setTolerance(tol)
# ll.setSubToleranceReductionFactor(0.1)
x,p=ll.solve(ll.x_ref*1.20,ll.p_ref*(-2),max_iter=20, verbose=False, usePCG=False,
iter_restart=20,max_correction_steps=10)
self.assertLess(Lsup(x-ll.x_ref), Lsup(ll.x_ref)*tol*10.)
self.assertLess(Lsup(p-ll.p_ref), Lsup(ll.p_ref)*tol*10.)
def testArithmeticTuple(self):
a=ArithmeticTuple(1.,2.)
self.assertTrue(len(a)==2, "wrong length")
self.assertTrue(a[0]==1., "wrong first item")
self.assertTrue(a[1]==2., "wrong second item")
c=a*6.
self.assertTrue(isinstance(c,ArithmeticTuple), "c is not an instance of ArithmeticTuple")
self.assertTrue(len(c)==2, "c has wrong length")
self.assertTrue(c[0]==6., "c has wrong first item")
self.assertTrue(c[1]==12., "c has wrong second item")
b=5.*a
self.assertTrue(isinstance(b,ArithmeticTuple),"b is not an instance of ArithmeticTuple")
self.assertTrue(len(b)==2, "b has wrong length")
self.assertTrue(b[0]==5., "b has wrong first item")
self.assertTrue(b[1]==10., "b has wrong second item")
a+=ArithmeticTuple(3.,4.)
self.assertTrue(a[0]==4., "wrong first item of inplace update")
self.assertTrue(a[1]==6., "wrong second item of inplace update")
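# Test_pdetools repeats the projector tests with fast=True (i.e. with a lumped mass
# matrix, as the class names Test_pdetools vs. Test_pdetools_noLumping suggest); the
# acceptance tolerance is relaxed to the element size h = Lsup(self.domain.getSize()).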
class Test_pdetools(Test_pdetools_noLumping):
def testProjector_rank0_fast_reduced(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=x[0]
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank1_fast_reduced(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=x
res=td_ref.interpolate(Function(self.domain))
td=p(res)
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank2_fast_reduced(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=[[11.,12.],[21,22.]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank3_fast_reduced(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=[[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank4_fast_reduced(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=[[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]]*(x[0]+x[1])
td=p(td_ref.interpolate(Function(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank0_fast_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=1.
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank1_fast_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=numpy.array([1.,2.,3.])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank2_fast_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=numpy.array([[11.,12.],[21,22.]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank3_fast_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=numpy.array([[[111.,112.],[121,122.]],[[211.,212.],[221,222.]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
def testProjector_rank4_fast_reduced_with_reduced_input(self):
x=ContinuousFunction(self.domain).getX()
h=Lsup(self.domain.getSize())
p=Projector(self.domain,reduce=True,fast=True)
td_ref=numpy.array([[[[1111.,1112.],[1121,1122.]],[[1211.,1212.],[1221,1222.]]],
[[[2111.,2112.],[2121,2122.]],[[2211.,2212.],[2221,2222.]]]])
td=p(Data(td_ref,ReducedFunction(self.domain)))
self.assertLess(Lsup(td-td_ref), Lsup(td_ref)*h)
-\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (16066, 18222), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((19326, 19536), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (19331, 19536), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((19602, 19819), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (19607, 19819), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((20093, 20162), 'esys.escript.pdetools.PCG', 'PCG', (['(b * 1.0)', 'Ap', '(x_ref * 0.0)', 'Ms', 'dot'], {'atol': '(0)', 'rtol': 'tol', 'iter_max': '(12)'}), '(b * 1.0, Ap, x_ref * 0.0, Ms, dot, atol=0, rtol=tol, iter_max=12)\n', (20096, 20162), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((20397, 22558), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 
3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 
1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (20402, 22558), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((23662, 23872), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (23667, 23872), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((23938, 24155), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (23943, 24155), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((24421, 24491), 'esys.escript.pdetools.MINRES', 'MINRES', (['(b * 1.0)', 'Ap', '(x_ref * 0)', 'Ms', 'dot'], {'atol': '(0)', 'rtol': 'tol', 'iter_max': '(12)'}), '(b * 1.0, Ap, x_ref * 0, Ms, dot, atol=0, rtol=tol, iter_max=12)\n', (24427, 24491), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((24660, 26821), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 
0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (24665, 26821), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((27925, 28135), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (27930, 28135), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((28201, 28418), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (28206, 28418), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), 
((28664, 28723), 'esys.escript.pdetools.TFQMR', 'TFQMR', (['b', 'Ap', '(x_ref * 0)', 'dot'], {'atol': '(0)', 'rtol': 'tol', 'iter_max': '(12)'}), '(b, Ap, x_ref * 0, dot, atol=0, rtol=tol, iter_max=12)\n', (28669, 28723), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((28896, 31057), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 
4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (28901, 31057), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32161, 32371), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (32166, 32371), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32437, 32654), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (32442, 32654), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32894, 32953), 'esys.escript.pdetools.GMRES', 'GMRES', (['b', 'Ap', '(x_ref * 0)', 'dot'], {'atol': '(0)', 'rtol': 'tol', 'iter_max': '(12)'}), '(b, Ap, x_ref * 0, dot, atol=0, rtol=tol, iter_max=12)\n', (32899, 32953), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((33130, 35291), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], 
[5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (33135, 35291), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((36395, 36605), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 
0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (36400, 36605), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((36671, 36888), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (36676, 36888), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((37161, 37230), 'esys.escript.pdetools.GMRES', 'GMRES', (['b', 'Ap', '(x_ref * 0)', 'dot'], {'atol': '(0)', 'rtol': 'tol', 'iter_max': '(12)', 'P_R': 'P_Rp'}), '(b, Ap, x_ref * 0, dot, atol=0, rtol=tol, iter_max=12, P_R=P_Rp)\n', (37166, 37230), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((62138, 62163), 'esys.escript.pdetools.ArithmeticTuple', 'ArithmeticTuple', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (62153, 62163), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((62915, 62940), 'esys.escript.pdetools.ArithmeticTuple', 'ArithmeticTuple', (['(3.0)', '(4.0)'], {}), '(3.0, 4.0)\n', (62930, 62940), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((63271, 63317), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (63280, 63317), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((63583, 63629), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (63592, 63629), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((63906, 63952), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (63915, 63952), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((64246, 64292), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (64255, 64292), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((64617, 64663), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 
'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (64626, 64663), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((65079, 65125), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (65088, 65125), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((65408, 65454), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (65417, 65454), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((65466, 65494), 'numpy.array', 'numpy.array', (['[1.0, 2.0, 3.0]'], {}), '([1.0, 2.0, 3.0])\n', (65477, 65494), False, 'import numpy\n'), ((65758, 65804), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (65767, 65804), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((65816, 65855), 'numpy.array', 'numpy.array', (['[[11.0, 12.0], [21, 22.0]]'], {}), '([[11.0, 12.0], [21, 22.0]])\n', (65827, 65855), False, 'import numpy\n'), ((66118, 66164), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (66127, 66164), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((66176, 66253), 'numpy.array', 'numpy.array', (['[[[111.0, 112.0], [121, 122.0]], [[211.0, 212.0], [221, 222.0]]]'], {}), '([[[111.0, 112.0], [121, 122.0]], [[211.0, 212.0], [221, 222.0]]])\n', (66187, 66253), False, 'import numpy\n'), ((66509, 66555), 'esys.escript.pdetools.Projector', 'Projector', (['self.domain'], {'reduce': '(True)', 'fast': '(True)'}), '(self.domain, reduce=True, fast=True)\n', (66518, 66555), False, 'from esys.escript.pdetools import Locator, Projector, TimeIntegrationManager, NoPDE, PCG, ArithmeticTuple, GMRES, MINRES, TFQMR, HomogeneousSaddlePointProblem\n'), ((66567, 66737), 'numpy.array', 'numpy.array', (['[[[[1111.0, 1112.0], [1121, 1122.0]], [[1211.0, 1212.0], [1221, 1222.0]]],\n [[[2111.0, 2112.0], [2121, 2122.0]], [[2211.0, 2212.0], [2221, 2222.0]]]]'], {}), '([[[[1111.0, 1112.0], [1121, 1122.0]], [[1211.0, 1212.0], [1221,\n 1222.0]]], [[[2111.0, 2112.0], [2121, 2122.0]], [[2211.0, 2212.0], [\n 2221, 2222.0]]]])\n', (66578, 66737), False, 'import numpy\n'), ((19907, 19916), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (19910, 19916), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((19947, 19972), 'numpy.zeros', 'zeros', (['(b.size,)', 'float64'], {}), '((b.size,), float64)\n', (19952, 19972), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((24243, 24252), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (24246, 24252), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((28503, 28512), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (28506, 28512), 
False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((28633, 28640), 'numpy.size', 'size', (['b'], {}), '(b)\n', (28637, 28640), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32737, 32746), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (32740, 32746), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32863, 32870), 'numpy.size', 'size', (['b'], {}), '(b)\n', (32867, 32870), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((36976, 36985), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (36979, 36985), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((14915, 14930), 'numpy.ones', 'numpy.ones', (['[2]'], {}), '([2])\n', (14925, 14930), False, 'import numpy\n'), ((15197, 15212), 'numpy.ones', 'numpy.ones', (['[2]'], {}), '([2])\n', (15207, 15212), False, 'import numpy\n'), ((15215, 15230), 'numpy.ones', 'numpy.ones', (['[2]'], {}), '([2])\n', (15225, 15230), False, 'import numpy\n'), ((15493, 15508), 'numpy.ones', 'numpy.ones', (['[2]'], {}), '([2])\n', (15503, 15508), False, 'import numpy\n'), ((15511, 15526), 'numpy.ones', 'numpy.ones', (['[2]'], {}), '([2])\n', (15521, 15526), False, 'import numpy\n'), ((19997, 20004), 'numpy.size', 'size', (['b'], {}), '(b)\n', (20001, 20004), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((24334, 24341), 'numpy.size', 'size', (['b'], {}), '(b)\n', (24338, 24341), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((28537, 28544), 'numpy.size', 'size', (['x'], {}), '(x)\n', (28541, 28544), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((32771, 32778), 'numpy.size', 'size', (['b'], {}), '(b)\n', (32775, 32778), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((37024, 37031), 'numpy.size', 'size', (['x'], {}), '(x)\n', (37028, 37031), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((37067, 37074), 'numpy.size', 'size', (['x'], {}), '(x)\n', (37071, 37074), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((37535, 39696), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 
51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (37540, 39696), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((41305, 41515), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (41310, 41515), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((41596, 41813), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 
30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (41601, 41813), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((42116, 42127), 'numpy.dot', 'dot', (['x0', 'x1'], {}), '(x0, x1)\n', (42119, 42127), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((42598, 44759), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, 
-4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (42603, 44759), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((46368, 46578), 'numpy.array', 'array', (['[0.41794207085296, 0.031441086046563, 0.882801683420401, 0.807186823427233,\n 0.48950999450145, 0.995486532098031, 0.351243009576568, \n 0.704352576819321, 0.850648989740204, 0.314596738052894]'], {}), '([0.41794207085296, 0.031441086046563, 0.882801683420401, \n 0.807186823427233, 0.48950999450145, 0.995486532098031, \n 0.351243009576568, 0.704352576819321, 0.850648989740204, 0.314596738052894]\n )\n', (46373, 46578), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((46659, 46876), 'numpy.array', 'array', (['[182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -8.7934289814322\n ]'], {}), '([182.91102396026295, -1.048322041992754, 44.1812938752062, \n 30.344553414038817, 15.247917439094513, 24.060664905403492, \n 27.210293789825833, 47.12206774407584, 199.26713641785685, -\n 8.7934289814322])\n', (46664, 46876), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((47179, 47190), 'numpy.dot', 'dot', (['x0', 'x1'], {}), '(x0, x1)\n', (47182, 47190), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((47649, 49810), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, 
-0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (47654, 49810), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((51614, 51827), 'numpy.array', 'array', (['[0.100225501676291, -0.308862704993209, 0.064097238997721, \n 0.253012436539738, 
-0.346223308561905, 0.2425508275422, -\n 0.194695862196008, 0.09451439391473, 0.302961126826511, -0.236043777597633]'], {}), '([0.100225501676291, -0.308862704993209, 0.064097238997721, \n 0.253012436539738, -0.346223308561905, 0.2425508275422, -\n 0.194695862196008, 0.09451439391473, 0.302961126826511, -0.236043777597633]\n )\n', (51619, 51827), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((51944, 52579), 'numpy.array', 'array', (['[[0.01627853113636, 0.06688235764255, 0.004870689484614], [\n 0.062879587145773, 0.038798770300146, 0.022155850155616], [\n 0.09312121957248, 0.110244632756116, 0.14053347386784], [\n 0.059000597728388, 0.090986953740106, 0.035316011834982], [\n 0.091209362659698, 0.13205572801294, 0.069462874306956], [\n 0.077790176986096, 0.133626423045765, 0.011149969846981], [\n 0.01407283482513, 0.094910926488907, 0.133498532648644], [\n 0.025728916673085, 0.102542818811672, 0.13657268163218], [\n 0.071254288170748, 0.071738715618163, 0.078005951991733], [\n 0.049463014576779, 0.103559223780991, 0.003356415647637]]'], {}), '([[0.01627853113636, 0.06688235764255, 0.004870689484614], [\n 0.062879587145773, 0.038798770300146, 0.022155850155616], [\n 0.09312121957248, 0.110244632756116, 0.14053347386784], [\n 0.059000597728388, 0.090986953740106, 0.035316011834982], [\n 0.091209362659698, 0.13205572801294, 0.069462874306956], [\n 0.077790176986096, 0.133626423045765, 0.011149969846981], [\n 0.01407283482513, 0.094910926488907, 0.133498532648644], [\n 0.025728916673085, 0.102542818811672, 0.13657268163218], [\n 0.071254288170748, 0.071738715618163, 0.078005951991733], [\n 0.049463014576779, 0.103559223780991, 0.003356415647637]])\n', (51949, 52579), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((52841, 52905), 'numpy.array', 'array', (['[2.580984952252628, 4.054090902056985, 0.935138168128546]'], {}), '([2.580984952252628, 4.054090902056985, 0.935138168128546])\n', (52846, 52905), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((52930, 53149), 'numpy.array', 'array', (['[123.32277536758224, -51.55620665556457, 16.220697868056913, \n 6.512480714694167, -5.727371407390975, 4.802494840775022, -\n 4.171606044721161, -1.862366353566293, 74.8502261632571, -\n 118.60246465707644]'], {}), '([123.32277536758224, -51.55620665556457, 16.220697868056913, \n 6.512480714694167, -5.727371407390975, 4.802494840775022, -\n 4.171606044721161, -1.862366353566293, 74.8502261632571, -\n 118.60246465707644])\n', (52935, 53149), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53261, 53461), 'numpy.array', 'array', (['[[9313.705360982807, -5755.536981691271, 806.2892455897337], [-\n 5755.536981691272, 4606.321002756208, -1630.5061963566093], [\n 806.2892455897335, -1630.506196356609, 2145.6503581638895]]'], {}), '([[9313.705360982807, -5755.536981691271, 806.2892455897337], [-\n 5755.536981691272, 4606.321002756208, -1630.5061963566093], [\n 806.2892455897335, -1630.506196356609, 2145.6503581638895]])\n', (53266, 53461), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53600, 53610), 'numpy.dot', 'dot', (['p', 'Bv'], {}), '(p, Bv)\n', (53603, 53610), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53746, 53757), 'numpy.dot', 'dot', (['p0', 'p1'], {}), '(p0, p1)\n', (53749, 53757), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((54994, 57155), 'numpy.array', 'array', (['[[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 
3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]]'], {}), '([[475.2141253159452, -0.2391895572674098, 0.5834798554135237, -\n 3.704394311709722, 5.765369186984777, -13.09786358737351, \n 25.22087134507148, -33.93956279045637, 104.685691477083, -\n 244.776419084954], [-0.2391895572674098, 125.6797283910693, -\n 0.9188270412920813, 1.300169538880688, -0.5353714719231424, \n 2.674709444667012, -11.1609784126958, 28.01193427514478, -\n 38.77806125898224, 30.63505753648256], [0.5834798554135237, -\n 0.9188270412920813, 62.40841811806843, -0.8176289504109282, \n 0.1447935098417076, -0.9721424148655324, 0.6713551574117577, -\n 3.656297654168375, 7.015141656913973, -41.9552593215625], [-\n 3.704394311709722, 1.300169538880688, -0.8176289504109282, \n 36.04980536782198, -0.6241238423759328, 1.142345320047869, -\n 3.438816797096519, 5.85485748136747, -4.524311288596452, \n 11.36590280389803], [5.765369186984777, -0.5353714719231424, \n 0.1447935098417076, -0.6241238423759328, 29.53997190215862, -\n 0.9474729233464712, 1.883516378345809, -1.90627476570423, \n 4.401859671778645, -10.64573816075257], [-13.09786358737351, \n 2.674709444667012, -0.9721424148655324, 1.142345320047869, -\n 0.9474729233464712, 28.76998216302979, -0.4853065259692995, \n 0.7088596468102618, -0.8972224295152829, 5.228606946522749], [\n 25.22087134507148, -11.1609784126958, 0.6713551574117577, -\n 3.438816797096519, 1.883516378345809, -0.4853065259692995, \n 51.21175860935919, -0.3523133115905478, 1.782136702229135, -\n 1.560849559916187], [-33.93956279045637, 28.01193427514478, -\n 
3.656297654168375, 5.85485748136747, -1.90627476570423, \n 0.7088596468102618, -0.3523133115905478, 84.11681423853814, -\n 0.5238590858177903, 1.515872114883926], [104.685691477083, -\n 38.77806125898224, 7.015141656913973, -4.524311288596452, \n 4.401859671778645, -0.8972224295152829, 1.782136702229135, -\n 0.5238590858177903, 179.7889693808014, -0.8362340479938084], [-\n 244.776419084954, 30.63505753648256, -41.9552593215625, \n 11.36590280389803, -10.64573816075257, 5.228606946522749, -\n 1.560849559916187, 1.515872114883926, -0.8362340479938084, \n 383.371933534663]])\n', (54999, 57155), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((58959, 59172), 'numpy.array', 'array', (['[0.100225501676291, -0.308862704993209, 0.064097238997721, \n 0.253012436539738, -0.346223308561905, 0.2425508275422, -\n 0.194695862196008, 0.09451439391473, 0.302961126826511, -0.236043777597633]'], {}), '([0.100225501676291, -0.308862704993209, 0.064097238997721, \n 0.253012436539738, -0.346223308561905, 0.2425508275422, -\n 0.194695862196008, 0.09451439391473, 0.302961126826511, -0.236043777597633]\n )\n', (58964, 59172), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((59289, 59924), 'numpy.array', 'array', (['[[0.01627853113636, 0.06688235764255, 0.004870689484614], [\n 0.062879587145773, 0.038798770300146, 0.022155850155616], [\n 0.09312121957248, 0.110244632756116, 0.14053347386784], [\n 0.059000597728388, 0.090986953740106, 0.035316011834982], [\n 0.091209362659698, 0.13205572801294, 0.069462874306956], [\n 0.077790176986096, 0.133626423045765, 0.011149969846981], [\n 0.01407283482513, 0.094910926488907, 0.133498532648644], [\n 0.025728916673085, 0.102542818811672, 0.13657268163218], [\n 0.071254288170748, 0.071738715618163, 0.078005951991733], [\n 0.049463014576779, 0.103559223780991, 0.003356415647637]]'], {}), '([[0.01627853113636, 0.06688235764255, 0.004870689484614], [\n 0.062879587145773, 0.038798770300146, 0.022155850155616], [\n 0.09312121957248, 0.110244632756116, 0.14053347386784], [\n 0.059000597728388, 0.090986953740106, 0.035316011834982], [\n 0.091209362659698, 0.13205572801294, 0.069462874306956], [\n 0.077790176986096, 0.133626423045765, 0.011149969846981], [\n 0.01407283482513, 0.094910926488907, 0.133498532648644], [\n 0.025728916673085, 0.102542818811672, 0.13657268163218], [\n 0.071254288170748, 0.071738715618163, 0.078005951991733], [\n 0.049463014576779, 0.103559223780991, 0.003356415647637]])\n', (59294, 59924), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((60186, 60250), 'numpy.array', 'array', (['[2.580984952252628, 4.054090902056985, 0.935138168128546]'], {}), '([2.580984952252628, 4.054090902056985, 0.935138168128546])\n', (60191, 60250), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((60275, 60494), 'numpy.array', 'array', (['[123.32277536758224, -51.55620665556457, 16.220697868056913, \n 6.512480714694167, -5.727371407390975, 4.802494840775022, -\n 4.171606044721161, -1.862366353566293, 74.8502261632571, -\n 118.60246465707644]'], {}), '([123.32277536758224, -51.55620665556457, 16.220697868056913, \n 6.512480714694167, -5.727371407390975, 4.802494840775022, -\n 4.171606044721161, -1.862366353566293, 74.8502261632571, -\n 118.60246465707644])\n', (60280, 60494), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((60606, 60806), 'numpy.array', 'array', (['[[9313.705360982807, -5755.536981691271, 806.2892455897337], [-\n 5755.536981691272, 4606.321002756208, 
-1630.5061963566093], [\n 806.2892455897335, -1630.506196356609, 2145.6503581638895]]'], {}), '([[9313.705360982807, -5755.536981691271, 806.2892455897337], [-\n 5755.536981691272, 4606.321002756208, -1630.5061963566093], [\n 806.2892455897335, -1630.506196356609, 2145.6503581638895]])\n', (60611, 60806), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((60945, 60955), 'numpy.dot', 'dot', (['p', 'Bv'], {}), '(p, Bv)\n', (60948, 60955), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61091, 61102), 'numpy.dot', 'dot', (['p0', 'p1'], {}), '(p0, p1)\n', (61094, 61102), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((24290, 24297), 'numpy.size', 'size', (['b'], {}), '(b)\n', (24294, 24297), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((41928, 41942), 'numpy.dot', 'dot', (['self.A', 'x'], {}), '(self.A, x)\n', (41931, 41942), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((41978, 41990), 'numpy.size', 'size', (['self.b'], {}), '(self.b)\n', (41982, 41990), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((46991, 47005), 'numpy.dot', 'dot', (['self.A', 'x'], {}), '(self.A, x)\n', (46994, 47005), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((47041, 47053), 'numpy.size', 'size', (['self.b'], {}), '(self.b)\n', (47045, 47053), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53815, 53824), 'numpy.dot', 'dot', (['v', 'v'], {}), '(v, v)\n', (53818, 53824), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((54041, 54052), 'numpy.dot', 'dot', (['Bv', 'Bv'], {}), '(Bv, Bv)\n', (54044, 54052), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((54145, 54160), 'numpy.dot', 'dot', (['self.Bt', 'p'], {}), '(self.Bt, p)\n', (54148, 54160), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((54292, 54301), 'numpy.size', 'size', (['out'], {}), '(out)\n', (54296, 54301), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61160, 61169), 'numpy.dot', 'dot', (['v', 'v'], {}), '(v, v)\n', (61163, 61169), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61386, 61397), 'numpy.dot', 'dot', (['Bv', 'Bv'], {}), '(Bv, Bv)\n', (61389, 61397), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61490, 61505), 'numpy.dot', 'dot', (['self.Bt', 'p'], {}), '(self.Bt, p)\n', (61493, 61505), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61637, 61646), 'numpy.size', 'size', (['out'], {}), '(out)\n', (61641, 61646), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((20243, 20252), 'numpy.dot', 'dot', (['A', 'x'], {}), '(A, x)\n', (20246, 20252), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53933, 53947), 'numpy.dot', 'dot', (['self.A', 'v'], {}), '(self.A, v)\n', (53936, 53947), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61278, 61292), 'numpy.dot', 'dot', (['self.A', 'v'], {}), '(self.A, v)\n', (61281, 61292), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((53918, 53933), 'numpy.dot', 'dot', (['self.Bt', 'p'], {}), '(self.Bt, p)\n', (53921, 53933), False, 'from numpy import array, prod, dot, zeros, size, float64\n'), ((61263, 61278), 'numpy.dot', 'dot', (['self.Bt', 'p'], {}), '(self.Bt, p)\n', (61266, 61278), False, 'from numpy import array, prod, dot, zeros, size, float64\n')]
|
from .pondering import proximity_ponderation,outlier_ponderation
from .fitting import fit
import numpy as np
import matplotlib.pyplot as plt
def plot(points,res=256):
    # plots each point with its ponderation, and the fitted function
fig, ax1 = plt.subplots()
points= np.array(points)
xpoints= points[:,0]
ypoints= points[:,1]
ponder= proximity_ponderation(xpoints)*outlier_ponderation(ypoints)
fit_res= fit(points)
fit_func= fit_res[-1]
xpointsfit= np.linspace(np.amin(xpoints),np.amax(xpoints),res)
ypointsfit= fit_func(xpointsfit)
ax1.plot(xpoints, ypoints, 'ro', label="observations")
ax1.plot(xpointsfit, ypointsfit, 'g-',label="fitting")
ax1.set_xlabel('date (HJD)')
ax2 = ax1.twinx()
ax1.set_ylabel('Magnitude')
ax2.plot(xpoints, ponder, '-bs',label="ponderation")
ax2.set_ylabel('Ponderation')
ax2.set_ylim(ymin=0)
ax1.legend(loc=3)
ax2.legend(loc=4)
plt.show()
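# A minimal usage sketch (not part of the original module). It assumes `points`
# is an iterable of (date in HJD, magnitude) pairs and that the sibling
# `pondering` and `fitting` modules are importable from the same package; the
# values in `observations` below are made up.
#
#   observations = [(2455000.1, 14.2), (2455000.3, 14.1), (2455000.6, 14.4)]
#   plot(observations, res=512)   # a larger `res` only smooths the fitted curve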
|
[
"matplotlib.pyplot.show",
"numpy.amin",
"numpy.amax",
"numpy.array",
"matplotlib.pyplot.subplots"
] |
[((246, 260), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (258, 260), True, 'import matplotlib.pyplot as plt\n'), ((274, 290), 'numpy.array', 'np.array', (['points'], {}), '(points)\n', (282, 290), True, 'import numpy as np\n'), ((941, 951), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (949, 951), True, 'import matplotlib.pyplot as plt\n'), ((493, 509), 'numpy.amin', 'np.amin', (['xpoints'], {}), '(xpoints)\n', (500, 509), True, 'import numpy as np\n'), ((510, 526), 'numpy.amax', 'np.amax', (['xpoints'], {}), '(xpoints)\n', (517, 526), True, 'import numpy as np\n')]
|
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
from convgru import ConvGRUCell
from gen_models import make_conv_net, make_fc_net, make_upconv_net
import rlkit.torch.pytorch_util as ptu
import numpy as np
from numpy import pi
from numpy import log as np_log
log_2pi = np_log(2*pi)
LOG_COV_MAX = 2
LOG_COV_MIN = -5
class MaskedNormalVAE(nn.Module):
def __init__(
self,
maze_dims,
z_dim,
encoder_specs,
decoder_specs
):
super().__init__()
in_ch = maze_dims[0]
in_h = maze_dims[1]
# make the encoder
self.encoder_conv_seq, x_enc_ch, x_enc_h = make_conv_net(in_ch, in_h, encoder_specs['conv_part_specs'])
self.x_enc_ch = x_enc_ch
self.x_enc_h = x_enc_h
flat_inter_img_dim = x_enc_ch * x_enc_h * x_enc_h
self.z_mask_conv_seq, _, _ = make_conv_net(
x_enc_ch, x_enc_h,
{
'kernel_sizes': [3],
'num_channels': [64],
'strides': [1],
'paddings': [1],
'use_bn': True
}
)
self.z_mask_fc_seq, _ = make_fc_net(64*x_enc_h*x_enc_h, {'hidden_sizes': [1024], 'use_bn':True})
self.z_mask_fc = nn.Linear(1024, 128, bias=True)
self.z_mask_gen_fc_seq, _ = make_fc_net(128, {'hidden_sizes': [1024, 4*x_enc_h*x_enc_h], 'use_bn':True})
self.z_mask_gen_conv = nn.Conv2d(4, 1, 3, stride=1, padding=1, bias=True)
self.encoder_fc_seq, h_dim = make_fc_net(flat_inter_img_dim, encoder_specs['fc_part_specs'])
self.z_mean_fc = nn.Linear(h_dim, z_dim, bias=True)
self.z_log_cov_fc = nn.Linear(h_dim, z_dim, bias=True)
# make the decoder
self.decoder_fc_seq, h_dim = make_fc_net(z_dim, decoder_specs['fc_part_specs'])
# assert h_dim == flat_inter_img_dim
self.decoder_upconv_seq, out_ch, out_h = make_upconv_net(x_enc_ch, x_enc_h, decoder_specs['upconv_part_specs'])
self.recon_mean_conv = nn.Conv2d(out_ch, 1, 1, stride=1, padding=0, bias=True)
self.recon_log_cov_conv = nn.Conv2d(out_ch, 1, 1, stride=1, padding=0, bias=True)
assert out_h == maze_dims[1], str(out_h) + ' != ' + str(maze_dims[1])
def forward(self, img_batch):
enc = self.encoder_conv_seq(img_batch)
hidden = self.z_mask_conv_seq(enc)
hidden = hidden.view(hidden.size(0), -1)
hidden = self.z_mask_fc_seq(hidden)
z_mask = self.z_mask_fc(hidden)
hidden = self.z_mask_gen_fc_seq(z_mask)
hidden = hidden.view(hidden.size(0), 4, self.x_enc_h, self.x_enc_h)
mask = self.z_mask_gen_conv(hidden)
mask = torch.sigmoid(mask)
enc = enc*mask
enc = enc.view(enc.size(0), -1)
enc = self.encoder_fc_seq(enc)
z_mean = self.z_mean_fc(enc)
z_log_cov = self.z_log_cov_fc(enc)
z_log_cov = torch.clamp(z_log_cov, LOG_COV_MIN, LOG_COV_MAX)
z_sample = z_mean
# ----------------
# eps = Variable(torch.randn(z_mean.size()))
# if z_mean.is_cuda: eps = eps.cuda()
# z_sample = z_mean + eps*torch.exp(0.5 * z_log_cov)
dec = self.decoder_fc_seq(z_sample)
dec = dec.view(dec.size(0), self.x_enc_ch, self.x_enc_h, self.x_enc_h)
dec = dec * mask
dec = self.decoder_upconv_seq(dec)
# ----------------
# dec = self.decoder_fc_seq(z_sample)
# dec = z_sample
recon_mean = self.recon_mean_conv(dec)
recon_mean = torch.sigmoid(recon_mean)
recon_log_cov = self.recon_log_cov_conv(dec)
recon_log_cov = torch.clamp(recon_log_cov, LOG_COV_MIN, LOG_COV_MAX)
return z_mean, z_log_cov, recon_mean, recon_log_cov, mask, None
    def compute_KL(self, post_mean, post_log_cov):
        # closed-form KL( N(mu, sigma^2) || N(0, I) ) summed over the batch:
        # -0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
        return -0.5 * torch.sum(
            1 + post_log_cov - post_mean**2 - torch.exp(post_log_cov)
        )
def compute_log_prob(self, recon_mean, recon_log_cov, obs_batch):
recon_mean = recon_mean.view(recon_mean.size(0), -1)
recon_log_cov = recon_log_cov.view(recon_log_cov.size(0), -1)
obs_batch = obs_batch.view(obs_batch.size(0), -1)
recon_cov = torch.exp(recon_log_cov)
# log_prob = -0.5 * torch.sum(
# (recon_mean - obs_batch)**2 / recon_cov
# )
# log_det_temp = torch.sum(recon_log_cov, 1) + log_2pi
# log_prob += -0.5 * torch.sum(log_det_temp)
log_prob = -0.5 * torch.sum((recon_mean - obs_batch)**2)
return log_prob
def compute_ELBO(
self,
post_mean, post_log_cov,
recon_mean, recon_log_cov,
obs_batch,
average_over_batch=True
):
KL = self.compute_KL(post_mean, post_log_cov)
log_prob = self.compute_log_prob(recon_mean, recon_log_cov, obs_batch)
elbo = log_prob - 0. * KL
if average_over_batch: elbo = elbo / float(obs_batch.size(0))
return elbo, KL
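# A rough usage sketch (illustrative only): the maze dimensions and the two spec
# dictionaries below are assumptions, since their exact layout is defined by
# make_conv_net / make_fc_net / make_upconv_net in gen_models.
#
#   vae = MaskedNormalVAE(maze_dims=(1, 32, 32), z_dim=64,
#                         encoder_specs=my_encoder_specs,
#                         decoder_specs=my_decoder_specs)
#   z_mean, z_log_cov, recon_mean, recon_log_cov, mask, _ = vae(img_batch)
#   elbo, kl = vae.compute_ELBO(z_mean, z_log_cov, recon_mean, recon_log_cov, img_batch)
#   (-elbo).backward()   # maximise the ELBO by minimising its negative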
|
[
"numpy.log",
"torch.nn.Conv2d",
"gen_models.make_conv_net",
"gen_models.make_upconv_net",
"torch.sigmoid",
"torch.clamp",
"gen_models.make_fc_net",
"torch.exp",
"torch.nn.Linear",
"torch.sum"
] |
[((326, 340), 'numpy.log', 'np_log', (['(2 * pi)'], {}), '(2 * pi)\n', (332, 340), True, 'from numpy import log as np_log\n'), ((698, 758), 'gen_models.make_conv_net', 'make_conv_net', (['in_ch', 'in_h', "encoder_specs['conv_part_specs']"], {}), "(in_ch, in_h, encoder_specs['conv_part_specs'])\n", (711, 758), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((920, 1050), 'gen_models.make_conv_net', 'make_conv_net', (['x_enc_ch', 'x_enc_h', "{'kernel_sizes': [3], 'num_channels': [64], 'strides': [1], 'paddings': [1],\n 'use_bn': True}"], {}), "(x_enc_ch, x_enc_h, {'kernel_sizes': [3], 'num_channels': [64],\n 'strides': [1], 'paddings': [1], 'use_bn': True})\n", (933, 1050), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((1207, 1284), 'gen_models.make_fc_net', 'make_fc_net', (['(64 * x_enc_h * x_enc_h)', "{'hidden_sizes': [1024], 'use_bn': True}"], {}), "(64 * x_enc_h * x_enc_h, {'hidden_sizes': [1024], 'use_bn': True})\n", (1218, 1284), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((1305, 1336), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(128)'], {'bias': '(True)'}), '(1024, 128, bias=True)\n', (1314, 1336), True, 'import torch.nn as nn\n'), ((1374, 1460), 'gen_models.make_fc_net', 'make_fc_net', (['(128)', "{'hidden_sizes': [1024, 4 * x_enc_h * x_enc_h], 'use_bn': True}"], {}), "(128, {'hidden_sizes': [1024, 4 * x_enc_h * x_enc_h], 'use_bn': \n True})\n", (1385, 1460), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((1482, 1532), 'torch.nn.Conv2d', 'nn.Conv2d', (['(4)', '(1)', '(3)'], {'stride': '(1)', 'padding': '(1)', 'bias': '(True)'}), '(4, 1, 3, stride=1, padding=1, bias=True)\n', (1491, 1532), True, 'import torch.nn as nn\n'), ((1571, 1634), 'gen_models.make_fc_net', 'make_fc_net', (['flat_inter_img_dim', "encoder_specs['fc_part_specs']"], {}), "(flat_inter_img_dim, encoder_specs['fc_part_specs'])\n", (1582, 1634), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((1661, 1695), 'torch.nn.Linear', 'nn.Linear', (['h_dim', 'z_dim'], {'bias': '(True)'}), '(h_dim, z_dim, bias=True)\n', (1670, 1695), True, 'import torch.nn as nn\n'), ((1724, 1758), 'torch.nn.Linear', 'nn.Linear', (['h_dim', 'z_dim'], {'bias': '(True)'}), '(h_dim, z_dim, bias=True)\n', (1733, 1758), True, 'import torch.nn as nn\n'), ((1824, 1874), 'gen_models.make_fc_net', 'make_fc_net', (['z_dim', "decoder_specs['fc_part_specs']"], {}), "(z_dim, decoder_specs['fc_part_specs'])\n", (1835, 1874), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((1969, 2039), 'gen_models.make_upconv_net', 'make_upconv_net', (['x_enc_ch', 'x_enc_h', "decoder_specs['upconv_part_specs']"], {}), "(x_enc_ch, x_enc_h, decoder_specs['upconv_part_specs'])\n", (1984, 2039), False, 'from gen_models import make_conv_net, make_fc_net, make_upconv_net\n'), ((2072, 2127), 'torch.nn.Conv2d', 'nn.Conv2d', (['out_ch', '(1)', '(1)'], {'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(out_ch, 1, 1, stride=1, padding=0, bias=True)\n', (2081, 2127), True, 'import torch.nn as nn\n'), ((2162, 2217), 'torch.nn.Conv2d', 'nn.Conv2d', (['out_ch', '(1)', '(1)'], {'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(out_ch, 1, 1, stride=1, padding=0, bias=True)\n', (2171, 2217), True, 'import torch.nn as nn\n'), ((2738, 2757), 'torch.sigmoid', 'torch.sigmoid', (['mask'], {}), '(mask)\n', (2751, 2757), False, 'import torch\n'), ((2961, 3009), 'torch.clamp', 
'torch.clamp', (['z_log_cov', 'LOG_COV_MIN', 'LOG_COV_MAX'], {}), '(z_log_cov, LOG_COV_MIN, LOG_COV_MAX)\n', (2972, 3009), False, 'import torch\n'), ((3584, 3609), 'torch.sigmoid', 'torch.sigmoid', (['recon_mean'], {}), '(recon_mean)\n', (3597, 3609), False, 'import torch\n'), ((3687, 3739), 'torch.clamp', 'torch.clamp', (['recon_log_cov', 'LOG_COV_MIN', 'LOG_COV_MAX'], {}), '(recon_log_cov, LOG_COV_MIN, LOG_COV_MAX)\n', (3698, 3739), False, 'import torch\n'), ((4268, 4292), 'torch.exp', 'torch.exp', (['recon_log_cov'], {}), '(recon_log_cov)\n', (4277, 4292), False, 'import torch\n'), ((4542, 4582), 'torch.sum', 'torch.sum', (['((recon_mean - obs_batch) ** 2)'], {}), '((recon_mean - obs_batch) ** 2)\n', (4551, 4582), False, 'import torch\n'), ((3949, 3972), 'torch.exp', 'torch.exp', (['post_log_cov'], {}), '(post_log_cov)\n', (3958, 3972), False, 'import torch\n')]
|
import tensorflow as tf
import numpy as np
import cv2
import os
import argparse
parser = argparse.ArgumentParser(description='Shade Sketches')
parser.add_argument('--image-size', type=int, default=320,
help='input image size (default: 320)')
parser.add_argument('--direction', type=str, default='810',
help='lighting directions (suggest to choose 810, 210, 710)')
parser.add_argument('--dir', type=str, default='810',
                    help='name of the input file (inside val/) to process')
args = parser.parse_args()
def cond_to_pos(cond):
cond_pos_rel = {
'002': [0, 0, -1],
'110': [0, 1, -1], '210': [1, 1, -1], '310': [1, 0, -1], '410': [1, -1, -1], '510': [0, -1, -1],
'610': [-1, -1, -1], '710': [-1, 0, -1], '810': [-1, 1, -1],
'120': [0, 1, 0], '220': [1, 1, 0], '320': [1, 0, 0], '420': [1, -1, 0], '520': [0, -1, 0], '620': [-1, -1, 0],
'720': [-1, 0, 0], '820': [-1, 1, 0],
'130': [0, 1, 1], '230': [1, 1, 1], '330': [1, 0, 1], '430': [1, -1, 1], '530': [0, -1, 1], '630': [-1, -1, 1],
'730': [-1, 0, 1], '830': [-1, 1, 1],
'001': [0, 0, 1]
}
return cond_pos_rel[cond]
if not os.path.exists('norm/'):
os.makedirs('norm/')
if not os.path.exists('out/'):
os.makedirs('out/')
# Line norm
with tf.Graph().as_default():
output_graph_def = tf.compat.v1.GraphDef()
with open("linenorm.pb", "rb") as f:
output_graph_def.ParseFromString(f.read())
tensors = tf.import_graph_def(output_graph_def, name="")
with tf.compat.v1.Session() as sess:
init = tf.compat.v1.global_variables_initializer()
sess.run(init)
op = sess.graph.get_operations()
for i, m in enumerate(op):
print('op{}:'.format(i), m.values())
inputs = sess.graph.get_tensor_by_name('input_1:0')
outputs = sess.graph.get_tensor_by_name('conv2d_9/Sigmoid:0')
s = args.image_size
for root, dirs, files in os.walk('val/', topdown=False):
for name in files:
                if name == args.dir:
line_path = os.path.join(root, name)
print(line_path)
img = cv2.imread(line_path, cv2.IMREAD_GRAYSCALE)
img = cv2.resize(img, (s, s))
img = img.astype(np.float32) / 255.
img_out = sess.run(outputs, {inputs: np.reshape(img, (1, img.shape[0], img.shape[1], 1))})
cv2.imwrite(os.path.join('norm/', name), np.squeeze(img_out) * 255.)
# Line shade
with tf.Graph().as_default():
output_graph_def = tf.compat.v1.GraphDef()
with open("lineshader.pb", "rb") as f:
output_graph_def.ParseFromString(f.read())
tensors = tf.import_graph_def(output_graph_def, name="")
with tf.compat.v1.Session() as sess:
init = tf.compat.v1.global_variables_initializer()
sess.run(init)
inputs1 = sess.graph.get_tensor_by_name('input_1:0')
inputs2 = sess.graph.get_tensor_by_name('input_2:0')
outputs = sess.graph.get_tensor_by_name('conv2d_139/Tanh:0')
s = args.image_size
for root, dirs, files in os.walk('norm/', topdown=False):
for name in files:
                if name == args.dir:
norm_path = os.path.join(root, name)
print(norm_path)
img = cv2.imread(norm_path, cv2.IMREAD_GRAYSCALE)
                    img = 1 - img.astype(np.float32) / 255.  # invert black-on-white lines to white-on-black
cond = cond_to_pos(args.direction) # lighting direction
img_out = sess.run(
outputs, {
inputs1: np.expand_dims(cond, 0),
inputs2: np.reshape(img, (1, s, s, 1)),
}
)
line = cv2.imread(os.path.join('val/', name), cv2.IMREAD_GRAYSCALE)
line = cv2.resize(line, (s, s))
                    shade = (1 - (np.squeeze(img_out) + 1) / 2) * 255.  # invert white-on-black shadow to black-on-white
final_output = 0.8 * line + 0.2 * shade # composite line drawing and shadow
cv2.imwrite(os.path.join('out/', name), final_output)
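# Example invocation (the script filename below is a placeholder). The frozen
# graphs linenorm.pb and lineshader.pb are expected in the working directory,
# and the line drawing to shade must sit under val/ with the exact file name
# passed via --dir:
#
#   python shade_sketch.py --image-size 320 --direction 810 --dir sketch.png
#
# The intermediate normal map is written to norm/<name> and the shaded
# composite to out/<name>.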
|
[
"os.makedirs",
"argparse.ArgumentParser",
"os.walk",
"os.path.exists",
"numpy.expand_dims",
"tensorflow.compat.v1.Session",
"cv2.imread",
"numpy.reshape",
"tensorflow.Graph",
"numpy.squeeze",
"tensorflow.import_graph_def",
"tensorflow.compat.v1.GraphDef",
"os.path.join",
"cv2.resize",
"tensorflow.compat.v1.global_variables_initializer"
] |
[((90, 143), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Shade Sketches"""'}), "(description='Shade Sketches')\n", (113, 143), False, 'import argparse\n'), ((1167, 1190), 'os.path.exists', 'os.path.exists', (['"""norm/"""'], {}), "('norm/')\n", (1181, 1190), False, 'import os\n'), ((1196, 1216), 'os.makedirs', 'os.makedirs', (['"""norm/"""'], {}), "('norm/')\n", (1207, 1216), False, 'import os\n'), ((1225, 1247), 'os.path.exists', 'os.path.exists', (['"""out/"""'], {}), "('out/')\n", (1239, 1247), False, 'import os\n'), ((1253, 1272), 'os.makedirs', 'os.makedirs', (['"""out/"""'], {}), "('out/')\n", (1264, 1272), False, 'import os\n'), ((1339, 1362), 'tensorflow.compat.v1.GraphDef', 'tf.compat.v1.GraphDef', ([], {}), '()\n', (1360, 1362), True, 'import tensorflow as tf\n'), ((2604, 2627), 'tensorflow.compat.v1.GraphDef', 'tf.compat.v1.GraphDef', ([], {}), '()\n', (2625, 2627), True, 'import tensorflow as tf\n'), ((1474, 1520), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['output_graph_def'], {'name': '""""""'}), "(output_graph_def, name='')\n", (1493, 1520), True, 'import tensorflow as tf\n'), ((1531, 1553), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (1551, 1553), True, 'import tensorflow as tf\n'), ((1578, 1621), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (1619, 1621), True, 'import tensorflow as tf\n'), ((1965, 1995), 'os.walk', 'os.walk', (['"""val/"""'], {'topdown': '(False)'}), "('val/', topdown=False)\n", (1972, 1995), False, 'import os\n'), ((2741, 2787), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['output_graph_def'], {'name': '""""""'}), "(output_graph_def, name='')\n", (2760, 2787), True, 'import tensorflow as tf\n'), ((2798, 2820), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (2818, 2820), True, 'import tensorflow as tf\n'), ((2845, 2888), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (2886, 2888), True, 'import tensorflow as tf\n'), ((3166, 3197), 'os.walk', 'os.walk', (['"""norm/"""'], {'topdown': '(False)'}), "('norm/', topdown=False)\n", (3173, 3197), False, 'import os\n'), ((1291, 1301), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (1299, 1301), True, 'import tensorflow as tf\n'), ((2556, 2566), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2564, 2566), True, 'import tensorflow as tf\n'), ((2096, 2120), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (2108, 2120), False, 'import os\n'), ((2185, 2228), 'cv2.imread', 'cv2.imread', (['line_path', 'cv2.IMREAD_GRAYSCALE'], {}), '(line_path, cv2.IMREAD_GRAYSCALE)\n', (2195, 2228), False, 'import cv2\n'), ((2255, 2278), 'cv2.resize', 'cv2.resize', (['img', '(s, s)'], {}), '(img, (s, s))\n', (2265, 2278), False, 'import cv2\n'), ((3300, 3324), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (3312, 3324), False, 'import os\n'), ((3389, 3432), 'cv2.imread', 'cv2.imread', (['norm_path', 'cv2.IMREAD_GRAYSCALE'], {}), '(norm_path, cv2.IMREAD_GRAYSCALE)\n', (3399, 3432), False, 'import cv2\n'), ((4008, 4032), 'cv2.resize', 'cv2.resize', (['line', '(s, s)'], {}), '(line, (s, s))\n', (4018, 4032), False, 'import cv2\n'), ((2479, 2506), 'os.path.join', 'os.path.join', (['"""norm/"""', 'name'], {}), "('norm/', name)\n", (2491, 2506), False, 'import os\n'), ((3931, 3957), 'os.path.join', 'os.path.join', (['"""val/"""', 
'name'], {}), "('val/', name)\n", (3943, 3957), False, 'import os\n'), ((4283, 4309), 'os.path.join', 'os.path.join', (['"""out/"""', 'name'], {}), "('out/', name)\n", (4295, 4309), False, 'import os\n'), ((2393, 2444), 'numpy.reshape', 'np.reshape', (['img', '(1, img.shape[0], img.shape[1], 1)'], {}), '(img, (1, img.shape[0], img.shape[1], 1))\n', (2403, 2444), True, 'import numpy as np\n'), ((2508, 2527), 'numpy.squeeze', 'np.squeeze', (['img_out'], {}), '(img_out)\n', (2518, 2527), True, 'import numpy as np\n'), ((3751, 3774), 'numpy.expand_dims', 'np.expand_dims', (['cond', '(0)'], {}), '(cond, 0)\n', (3765, 3774), True, 'import numpy as np\n'), ((3813, 3842), 'numpy.reshape', 'np.reshape', (['img', '(1, s, s, 1)'], {}), '(img, (1, s, s, 1))\n', (3823, 3842), True, 'import numpy as np\n'), ((4068, 4087), 'numpy.squeeze', 'np.squeeze', (['img_out'], {}), '(img_out)\n', (4078, 4087), True, 'import numpy as np\n')]
|
"""
MIT License
Copyright (c) 2018 <NAME> and <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from itertools import cycle
import warnings
import matplotlib.pyplot as plt
import matplotlib as mpl
from scipy import sparse
import numpy as np
from sklearn.base import TransformerMixin
from sklearn.metrics.pairwise import pairwise_distances
from scipy.spatial.distance import squareform
import persim
from pyRipser import doRipsFiltrationDM as DRFDM
from pyRipser import doRipsFiltrationDMCycles as DRFDMCycles
from pyRipser import doRipsFiltrationDMSparse as DRFDMSparse
from pyRipser import doRipsFiltrationDMSparseCycles as DRFDMSparseCycles
def main():
rips = Rips()
dm = squareform([1.0, 1.0,1.41421356,1.41421356,1.0,1.0, 1.0,1.41421356,1.41421356,1.73205081, 1.41421356,1.0,1.73205081,1.41421356,1.0, 1.41421356,1.73205081,1.0,1.41421356,1.0,1.41421356, 1.73205081,1.41421356,1.41421356,1.0,1.41421356,1.0,1.0])
    rips.fit_transform(dm, distance_matrix=True)
print()
def dpoint2pointcloud(X, i, metric):
"""
Return the distance from the ith point in a Euclidean point cloud
to the rest of the points
Parameters
----------
X: ndarray (n_samples, n_features)
A numpy array of data
i: int
The index of the point from which to return all distances
metric: string or callable
The metric to use when calculating distance between instances in a
feature array
"""
ds = pairwise_distances(X, X[i, :][None, :], metric=metric).flatten()
ds[i] = 0
return ds
def get_greedy_perm(X, n_perm=None, distance_matrix=False, metric="euclidean"):
"""
Compute a furthest point sampling permutation of a set of points
Parameters
----------
X: ndarray (n_samples, n_features)
A numpy array of either data or distance matrix
distance_matrix: bool
Indicator that X is a distance matrix, if not we compute
distances in X using the chosen metric.
n_perm: int
Number of points to take in the permutation
metric: string or callable
The metric to use when calculating distance between instances in a
feature array
Returns
-------
idx_perm: ndarray(n_perm)
Indices of points in the greedy permutation
lambdas: ndarray(n_perm)
Covering radii at different points
dperm2all: ndarray(n_perm, n_samples)
Distances from points in the greedy permutation to points
in the original point set
"""
if not n_perm:
n_perm = X.shape[0]
# By default, takes the first point in the list to be the
# first point in the permutation, but could be random
idx_perm = np.zeros(n_perm, dtype=np.int64)
lambdas = np.zeros(n_perm)
if distance_matrix:
dpoint2all = lambda i: X[i, :]
else:
dpoint2all = lambda i: dpoint2pointcloud(X, i, metric)
ds = dpoint2all(0)
dperm2all = [ds]
for i in range(1, n_perm):
idx = np.argmax(ds)
idx_perm[i] = idx
lambdas[i - 1] = ds[idx]
dperm2all.append(dpoint2all(idx))
ds = np.minimum(ds, dperm2all[-1])
lambdas[-1] = np.max(ds)
dperm2all = np.array(dperm2all)
return (idx_perm, lambdas, dperm2all)
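# A small illustrative sketch (not part of the original docstring): subsampling a
# random point cloud with the greedy (furthest point) permutation.
#
#   X = np.random.rand(1000, 3)
#   idx_perm, lambdas, dperm2all = get_greedy_perm(X, n_perm=100)
#   landmarks = X[idx_perm]   # 100 well-spread landmark points
#   r_cover = lambdas[-1]     # every original point lies within r_cover of a landmark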
def ripser(
X,
maxdim=1,
thresh=np.inf,
coeff=2,
distance_matrix=False,
do_cocycles=False,
do_cycles=False,
metric="euclidean",
n_perm=None,
):
"""Compute persistence diagrams for X.
X can be a data set of points or a distance matrix. When using a data set
as X it will be converted to a distance matrix using the metric specified.
Parameters
----------
X : ndarray (n_samples, n_features)
A numpy array of either data or distance matrix (also pass `distance_matrix=True`). Can also be a sparse distance matrix of type scipy.sparse
maxdim: int, optional, default 1
Maximum homology dimension computed. Will compute all dimensions lower than and equal to this value. For 1, H_0 and H_1 will be computed.
thresh: float, default infinity
Maximum distances considered when constructing filtration. If infinity, compute the entire filtration.
coeff: int prime, default 2
Compute homology with coefficients in the prime field Z/pZ for p=coeff.
distance_matrix: bool, optional, default False
When True the input matrix X will be considered a distance matrix.
do_cocycles: bool, optional, default False
Computed cocycles will be available in the `cocycles` value
of the return dictionary.
do_cycles: bool, optional, default False
Computed cycles will be available in the `cycles` value
of the return dictionary.
metric: string or callable, optional, default "euclidean"
Use this metric to compute distances between rows of X.
"euclidean", "manhattan" and "cosine" are already provided metrics
to choose from by using their name.
You can provide a callable function and it will be used with two
rows as arguments, it will be called once for each pair of rows in X.
The computed distance will be available in the result dictionary under
the key `dperm2all`.
n_perm: int, optional, default None
The number of points to subsample in a "greedy permutation,"
or a furthest point sampling of the points. These points
will be used in lieu of the full point cloud for a faster
computation, at the expense of some accuracy, which can
be bounded as a maximum bottleneck distance to all diagrams
on the original point set
Returns
-------
dict
The result of the computation.
.. note::
            Each list in `dgms` has a corresponding list in `cocycles`.
>>> r = ripser(...)
For each dimension ``d`` and index ``k`` then ``r['dgms'][d][k]``
is the barcode associated to the representative cocycle
``r['cocycles'][d][k]``.
The keys available in the dictionary are the:
* ``dgms``: list (size maxdim) of ndarray (n_pairs, 2)
For each dimension less than ``maxdim`` a list of persistence diagrams.
            Each persistence diagram is an ndarray with one (birth time, death time) pair per row.
* ``cycles``: list (size maxdim) of list of ndarray
For each dimension less than ``maxdim`` a list of cycles.
Each cycle in dimension ``d`` is represented as a ndarray of
``(k,d+1)`` elements. Each non zero value of the cycle
is laid out in a row, with each of the vertices making up the cycle.
* ``cocycles``: list (size maxdim) of list of ndarray
For each dimension less than ``maxdim`` a list of representative cocycles.
Each representative cocycle in dimension ``d`` is represented as a
ndarray of ``(k,d+1)`` elements. Each non zero value of the cocycle
is laid out in a row, first the ``d`` indices of the simplex and then
the value of the cocycle on the simplex. The indices of the simplex
reference the original point cloud, even if a greedy permutation was used.
* ``num_edges``: int
The number of edges added during the computation
* ``dperm2all``: ndarray(n_samples, n_samples) or ndarray (n_perm, n_samples) if n_perm
The distance matrix used during the computation. When ``n_perm``
is not None the distance matrix will only refers to the subsampled
dataset.
* ``idx_perm``: ndarray(n_perm) if ``n_perm`` > 0
Index into the original point cloud of the points used
as a subsample in the greedy permutation
>>> r = ripser(X, n_perm=k)
>>> subsampling = X[r['idx_perm']]
* 'r_cover': float
Covering radius of the subsampled points.
If ``n_perm <= 0``, then the full point cloud was used and this is 0
Examples
--------
.. code:: python
        from ripser import ripser
from sklearn import datasets
from persim import plot_diagrams
data = datasets.make_circles(n_samples=110)[0]
        dgms = ripser(data, do_cycles=True)['dgms']
plot_diagrams(dgms, show = True)
Raises
------
ValueError
If the distance matrix is not square.
ValueError
When using both a greedy permutation and a sparse distance matrix.
ValueError
When `n_perm` value is bigger than the number of rows in the matrix.
ValueError
When `n_perm` is non positive.
Warns
----
When using a square matrix without toggling `distance_matrix` to True.
When there are more columns than rows (as each row is a different data point).
"""
dim_0_pairs = []
cycles = []
if distance_matrix:
if not (X.shape[0] == X.shape[1]):
raise ValueError("Distance matrix is not square")
else:
if X.shape[0] == X.shape[1]:
warnings.warn(
"The input matrix is square, but the distance_matrix "
+ "flag is off. Did you mean to indicate that "
+ "this was a distance matrix?"
)
elif X.shape[0] < X.shape[1]:
warnings.warn(
"The input point cloud has more columns than rows; "
+ "did you mean to transpose?"
)
if n_perm and distance_matrix and sparse.issparse(X):
raise ValueError(
"Greedy permutation is not supported for sparse distance matrices"
)
if n_perm and n_perm > X.shape[0]:
raise ValueError(
"Number of points in greedy permutation is greater"
+ " than number of points in the point cloud"
)
if n_perm and n_perm < 0:
raise ValueError(
"Should be a strictly positive number of points in the greedy permutation"
)
idx_perm = np.arange(X.shape[0])
r_cover = 0.0
doing_permutation = False
if n_perm and n_perm < X.shape[0]:
doing_permutation = True
idx_perm, lambdas, dperm2all = get_greedy_perm(
X, n_perm=n_perm, distance_matrix=distance_matrix, metric=metric
)
r_cover = lambdas[-1]
dm = dperm2all[:, idx_perm]
else:
if distance_matrix:
dm = X
else:
dm = pairwise_distances(X, metric=metric)
dperm2all = dm
n_points = dm.shape[0]
if not sparse.issparse(dm) and np.sum(np.abs(dm.diagonal()) > 0) > 0:
# If any of the diagonal elements are nonzero,
# convert to sparse format, because currently
# that's the only format that handles nonzero
# births
dm = sparse.coo_matrix(dm)
if sparse.issparse(dm):
if sparse.isspmatrix_coo(dm):
# If the matrix is already COO, we need to order the row and column indices
# lexicographically to avoid errors. See issue #103
row, col, data = dm.row, dm.col, dm.data
lex_sort_idx = np.lexsort((col, row))
row, col, data = row[lex_sort_idx], col[lex_sort_idx], data[lex_sort_idx]
else:
# Lexicographic ordering is performed by scipy upon conversion to COO
coo = dm.tocoo()
row, col, data = coo.row, coo.col, coo.data
if do_cycles:
res = DRFDMSparseCycles(
row.astype(dtype=np.int32, order="C"),
col.astype(dtype=np.int32, order="C"),
np.array(data, dtype=np.float32, order="C"),
n_points,
maxdim,
thresh,
coeff,
)
else:
res = DRFDMSparse(
row.astype(dtype=np.int32, order="C"),
col.astype(dtype=np.int32, order="C"),
np.array(data, dtype=np.float32, order="C"),
n_points,
maxdim,
thresh,
coeff,
)
else:
I, J = np.meshgrid(np.arange(n_points), np.arange(n_points))
DParam = np.array(dm[I > J], dtype=np.float32)
if do_cycles:
res = DRFDMCycles(DParam,
maxdim,
thresh,
coeff
)
else:
res = DRFDM(DParam,
maxdim,
thresh,
coeff,
do_cocycles
)
#
# print(res)
# Unwrap persistence diagrams
dgms = res["births_and_deaths_by_dim"]
for dim in range(len(dgms)):
N = int(len(dgms[dim]) / 2)
# print(dgms[dim])
dgms[dim] = np.reshape(np.array(dgms[dim]), [N, 2])
# Unwrap cycles if calculated
if do_cycles:
for dim in range(len(res["cycles_by_dim"])):
cycles.append([])
for j in range(len(res["cycles_by_dim"][dim])):
ccl = res["cycles_by_dim"][dim][j]
n = int(len(ccl)/2)
ccl = np.reshape(np.array(ccl, dtype=np.int64), [n, 2])
ccl=np.concatenate((ccl[:1], ccl[2:], ccl[1].reshape(1, -1)), axis=0)
# ccl[:, -1] = np.mod(ccl[:, -1], coeff)
# if doing_permutation:
# Retain original indices in the point cloud
# ccl[:, 0:-1] = idx_perm[ccl[:, 0:-1]]
cycles[dim].append(ccl)
pairs = np.array(res["dim_0_pairs"])
if len(pairs) % 2 == 0:
pairs = np.append(pairs, np.array([0, np.nan]))
else:
pairs = np.append(pairs, np.array([np.nan]))
dim_0_pairs = np.reshape(pairs, (int(len(pairs) / 2), 2))
ret = {
"dgms": dgms,
"dim_0_pairs": dim_0_pairs,
"cycles": cycles,
"num_edges": res["num_edges"],
"dperm2all": dperm2all,
"idx_perm": idx_perm,
"r_cover": r_cover,
}
else:
# Unwrap cocycles
cocycles = []
for dim in range(len(res["cocycles_by_dim"])):
cocycles.append([])
for j in range(len(res["cocycles_by_dim"][dim])):
ccl = res["cocycles_by_dim"][dim][j]
n = int(len(ccl) / (dim + 2))
ccl = np.reshape(np.array(ccl, dtype=np.int64), [n, dim + 2])
ccl[:, -1] = np.mod(ccl[:, -1], coeff)
if doing_permutation:
# Retain original indices in the point cloud
ccl[:, 0:-1] = idx_perm[ccl[:, 0:-1]]
cocycles[dim].append(ccl)
ret = {
"dgms": dgms,
"cocycles": cocycles,
"num_edges": res["num_edges"],
"dperm2all": dperm2all,
"idx_perm": idx_perm,
"r_cover": r_cover,
}
return ret
def lower_star_img(img):
"""
Construct a lower star filtration on an image
Parameters
----------
img: ndarray (M, N)
An array of single channel image data
Returns
-------
I: ndarray (K, 2)
A 0-dimensional persistence diagram corresponding to the sublevelset filtration
"""
m, n = img.shape
idxs = np.arange(m * n).reshape((m, n))
I = idxs.flatten()
J = idxs.flatten()
V = img.flatten()
# Connect 8 spatial neighbors
tidxs = np.ones((m + 2, n + 2), dtype=np.int64) * np.nan
tidxs[1:-1, 1:-1] = idxs
tD = np.ones_like(tidxs) * np.nan
tD[1:-1, 1:-1] = img
for di in [-1, 0, 1]:
for dj in [-1, 0, 1]:
if di == 0 and dj == 0:
continue
thisJ = np.roll(np.roll(tidxs, di, axis=0), dj, axis=1)
thisD = np.roll(np.roll(tD, di, axis=0), dj, axis=1)
thisD = np.maximum(thisD, tD)
# Deal with boundaries
boundary = ~np.isnan(thisD)
thisI = tidxs[boundary]
thisJ = thisJ[boundary]
thisD = thisD[boundary]
I = np.concatenate((I, thisI.flatten()))
J = np.concatenate((J, thisJ.flatten()))
V = np.concatenate((V, thisD.flatten()))
sparseDM = sparse.coo_matrix((V, (I, J)), shape=(idxs.size, idxs.size))
return ripser(sparseDM, distance_matrix=True, maxdim=0)["dgms"][0]
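# Illustrative sketch (the pixel values are made up): 0-dimensional sublevelset
# persistence of a small single-channel image.
#
#   img = np.array([[0.0, 2.0, 1.0],
#                   [2.0, 3.0, 2.0],
#                   [1.0, 2.0, 0.5]])
#   dgm0 = lower_star_img(img)   # rows are (birth, death) pairs; the class born
#                                # at the global minimum never dies (death = inf)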
class Rips(TransformerMixin):
""" sklearn style class interface for :code:`ripser` with :code:`fit` and :code:`transform` methods..
Parameters
----------
maxdim: int, optional, default 1
Maximum homology dimension computed. Will compute all dimensions
lower than and equal to this value.
For 1, H_0 and H_1 will be computed.
thresh: float, default infinity
Maximum distances considered when constructing filtration.
If infinity, compute the entire filtration.
coeff: int prime, default 2
Compute homology with coefficients in the prime field Z/pZ for p=coeff.
do_cocycles: bool
Indicator of whether to compute cocycles, if so, we compute and store
cocycles in the `cocycles_` dictionary Rips member variable
do_cycles: bool
Indicator of whether to compute cycles, if so, we compute and store
cycles in the `cycles_` dictionary Rips member variable
n_perm: int
The number of points to subsample in a "greedy permutation,"
or a furthest point sampling of the points. These points
will be used in lieu of the full point cloud for a faster
computation, at the expense of some accuracy, which can
be bounded as a maximum bottleneck distance to all diagrams
on the original point set
verbose: boolean
Whether to print out information about this object
as it is constructed
Attributes
----------
`dgm_`: list of ndarray, each shape (n_pairs, 2)
After `transform`, `dgm_` contains computed persistence diagrams in
each dimension
cocycles_: list (size maxdim) of list of ndarray
A list of representative cocycles in each dimension. The list
in each dimension is parallel to the diagram in that dimension;
that is, each entry of the list is a representative cocycle of
        the corresponding point expressed as an ndarray(K, d+2), where K is
        the number of nonzero values of the cocycle and d is the dimension
        of the cocycle. The first d+1 columns of each array index into
the simplices of the (subsampled) point cloud, and the last column
is the value of the cocycle at that simplex
cycles_: list (size maxdim) of list of ndarray
A list of representative cycles in each dimension. The list
in each dimension are the corresponding vertices which make up each
topological feature
dperm2all_: ndarray(n_samples, n_samples) or ndarray (n_perm, n_samples) if n_perm
The distance matrix used in the computation if n_perm is none.
Otherwise, the distance from all points in the permutation to
all points in the dataset
metric_: string or callable
The metric to use when calculating distance between instances in a
feature array. If metric is a string, it must be one of the options
specified in pairwise_distances, including "euclidean", "manhattan",
or "cosine". Alternatively, if metric is a callable function, it is
called on each pair of instances (rows) and the resulting value
recorded. The callable should take two arrays from X as input and
return a value indicating the distance between them.
num_edges: int
The number of edges added during the computation
idx_perm: ndarray(n_perm) if n_perm > 0
Index into the original point cloud of the points used
as a subsample in the greedy permutation
r_cover: float
Covering radius of the subsampled points.
If n_perm <= 0, then the full point cloud was used and this is 0
Examples
--------
.. code:: python
from ripser import Rips
import tadasets
data = tadasets.dsphere(n=110, d=2)[0]
rips = Rips()
rips.transform(data)
rips.plot()
"""
def __init__(
self,
maxdim=1,
thresh=np.inf,
coeff=2,
do_cycles=False,
do_cocycles=False,
n_perm=None,
verbose=True,
):
# print(do_cycles)
self.maxdim = maxdim
self.thresh = thresh
self.coeff = coeff
self.do_cycles = do_cycles
self.do_cocycles = do_cocycles
self.n_perm = n_perm
self.verbose = verbose
# Internal variables
self.dgms_ = None
self.dim_0_pairs_ = None
self.cocycles_ = None
self.dperm2all_ = None # Distance matrix
self.metric_ = None
self.num_edges_ = None # Number of edges added
self.idx_perm_ = None
self.r_cover_ = 0.0
if self.verbose:
print(
"Rips(maxdim={}, thresh={}, coeff={}, do_cycles={}, do_cocycles={}, n_perm = {}, verbose={})".format(
maxdim, thresh, coeff, do_cycles, do_cocycles, n_perm, verbose
)
)
def transform(self, X, distance_matrix=False, metric="euclidean"):
result = ripser(
X,
maxdim=self.maxdim,
thresh=self.thresh,
coeff=self.coeff,
do_cycles = self.do_cycles,
do_cocycles=self.do_cocycles,
distance_matrix=distance_matrix,
metric=metric,
n_perm=self.n_perm,
)
self.dgms_ = result["dgms"]
self.num_edges_ = result["num_edges"]
self.dperm2all_ = result["dperm2all"]
self.idx_perm_ = result["idx_perm"]
if self.do_cycles:
self.dim_0_pairs_ = result["dim_0_pairs"]
self.cycles_ = result["cycles"][-1]
elif self.do_cocycles:
self.cocycles_ = result["cocycles"]
self.r_cover_ = result["r_cover"]
return self.dgms_
def fit_transform(self, X, distance_matrix=False, metric="euclidean"):
"""
Compute persistence diagrams for X data array and return the diagrams.
Parameters
----------
X: ndarray (n_samples, n_features)
A numpy array of either data or distance matrix.
distance_matrix: bool
Indicator that X is a distance matrix, if not we compute a
distance matrix from X using the chosen metric.
metric: string or callable
The metric to use when calculating distance between instances in a
feature array. If metric is a string, it must be one of the options
specified in pairwise_distances, including "euclidean", "manhattan",
or "cosine". Alternatively, if metric is a callable function, it is
called on each pair of instances (rows) and the resulting value
recorded. The callable should take two arrays from X as input and
return a value indicating the distance between them.
Returns
-------
dgms: list (size maxdim) of ndarray (n_pairs, 2)
A list of persistence diagrams, one for each dimension less
than maxdim. Each diagram is an ndarray of size (n_pairs, 2) with
the first column representing the birth time and the second column
representing the death time of each pair.
"""
self.transform(X, distance_matrix, metric)
return self.dgms_
def print_persistence(self):
if self.do_cycles:
print("Dim 0 Persistence Intervals:")
for i, interval in enumerate(self.dgms_[0]):
                if i == len(self.dgms_[0]) - 1:
print(f"[{interval[0]:.1f}, {interval[1]:.3f}): []")
continue
print(f"[{interval[0]:.1f}, {interval[1]:.3f}): {self.dim_0_pairs_[i]}")
print("Dim 1 Persistence Intervals:")
for i, interval in enumerate(self.dgms_[1]):
print(f"[{interval[0]:.1f}, {interval[1]:.3f}): {[x.tolist() for x in self.cycles_[i]]}")
else:
print("Dim 0 Persistence Intervals:")
for i, interval in enumerate(self.dgms_[0]):
if i == len(self.dgms_[0]) - 1:
print(f"[{interval[0]:.1f}, {interval[1]:.3f})")
continue
print(f"[{interval[0]:.1f}, {interval[1]:.3f})")
print("Dim 1 Persistence Intervals:")
for i, interval in enumerate(self.dgms_[1]):
print(f"[{interval[0]:.1f}, {interval[1]:.3f})")
def plot(
self,
diagrams=None,
*args,
**kwargs
):
"""A helper function to plot persistence diagrams.
Parameters
----------
diagrams: ndarray (n_pairs, 2) or list of diagrams
A diagram or list of diagrams as returned from self.fit.
If diagram is None, we use `self.dgm_` for plotting.
If diagram is a list of diagrams, then plot all on the same plot
using different colors.
plot_only: list of numeric
If specified, an array of only the diagrams that should be plotted.
title: string, default is None
If title is defined, add it as title of the plot.
xy_range: list of numeric [xmin, xmax, ymin, ymax]
User provided range of axes. This is useful for comparing
multiple persistence diagrams.
labels: string or list of strings
Legend labels for each diagram.
If none are specified, we use H_0, H_1, H_2,... by default.
colormap: string, default is 'default'
Any of matplotlib color palettes.
Some options are 'default', 'seaborn', 'sequential'.
See all available styles with
.. code:: python
import matplotlib as mpl
                print(mpl.style.available)
size: numeric, default is 20
Pixel size of each point plotted.
        ax_color: any valid matplotlib color type.
See [https://matplotlib.org/api/colors_api.html](https://matplotlib.org/api/colors_api.html) for complete API.
diagonal: bool, default is True
Plot the diagonal x=y line.
lifetime: bool, default is False. If True, diagonal is turned to False.
Plot life time of each point instead of birth and death.
Essentially, visualize (x, y-x).
legend: bool, default is True
If true, show the legend.
show: bool, default is True
Call plt.show() after plotting.
If you are using self.plot() as part of a subplot,
set show=False and call plt.show() only once at the end.
"""
if diagrams is None:
# Allow using transformed diagrams as default
diagrams = self.dgms_
persim.plot_diagrams(
diagrams,
*args,
**kwargs
)
__all__ = ["Rips", "ripser", "lower_star_img"]
if __name__=="__main__":
main()
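# A short usage sketch beyond main() above, this time from a point cloud rather
# than a precomputed distance matrix (`data` is a placeholder array of shape
# (n_samples, n_features)):
#
#   rips = Rips(maxdim=1, do_cycles=True)
#   rips.fit_transform(data)
#   rips.print_persistence()   # intervals plus the pairs/cycles behind them
#   rips.plot()                # persistence diagrams via persim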
|
[
"numpy.maximum",
"numpy.argmax",
"scipy.sparse.issparse",
"pyRipser.doRipsFiltrationDMCycles",
"numpy.ones",
"numpy.isnan",
"numpy.arange",
"sklearn.metrics.pairwise.pairwise_distances",
"numpy.max",
"scipy.sparse.coo_matrix",
"pyRipser.doRipsFiltrationDM",
"numpy.minimum",
"numpy.ones_like",
"scipy.sparse.isspmatrix_coo",
"numpy.roll",
"scipy.spatial.distance.squareform",
"numpy.mod",
"persim.plot_diagrams",
"numpy.lexsort",
"numpy.zeros",
"numpy.array",
"warnings.warn"
] |
[((1676, 1955), 'scipy.spatial.distance.squareform', 'squareform', (['[1.0, 1.0, 1.41421356, 1.41421356, 1.0, 1.0, 1.0, 1.41421356, 1.41421356, \n 1.73205081, 1.41421356, 1.0, 1.73205081, 1.41421356, 1.0, 1.41421356, \n 1.73205081, 1.0, 1.41421356, 1.0, 1.41421356, 1.73205081, 1.41421356, \n 1.41421356, 1.0, 1.41421356, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.41421356, 1.41421356, 1.0, 1.0, 1.0, 1.41421356, \n 1.41421356, 1.73205081, 1.41421356, 1.0, 1.73205081, 1.41421356, 1.0, \n 1.41421356, 1.73205081, 1.0, 1.41421356, 1.0, 1.41421356, 1.73205081, \n 1.41421356, 1.41421356, 1.0, 1.41421356, 1.0, 1.0])\n', (1686, 1955), False, 'from scipy.spatial.distance import squareform\n'), ((3690, 3722), 'numpy.zeros', 'np.zeros', (['n_perm'], {'dtype': 'np.int64'}), '(n_perm, dtype=np.int64)\n', (3698, 3722), True, 'import numpy as np\n'), ((3737, 3753), 'numpy.zeros', 'np.zeros', (['n_perm'], {}), '(n_perm)\n', (3745, 3753), True, 'import numpy as np\n'), ((4155, 4165), 'numpy.max', 'np.max', (['ds'], {}), '(ds)\n', (4161, 4165), True, 'import numpy as np\n'), ((4182, 4201), 'numpy.array', 'np.array', (['dperm2all'], {}), '(dperm2all)\n', (4190, 4201), True, 'import numpy as np\n'), ((11129, 11150), 'numpy.arange', 'np.arange', (['X.shape[0]'], {}), '(X.shape[0])\n', (11138, 11150), True, 'import numpy as np\n'), ((11954, 11973), 'scipy.sparse.issparse', 'sparse.issparse', (['dm'], {}), '(dm)\n', (11969, 11973), False, 'from scipy import sparse\n'), ((17436, 17496), 'scipy.sparse.coo_matrix', 'sparse.coo_matrix', (['(V, (I, J))'], {'shape': '(idxs.size, idxs.size)'}), '((V, (I, J)), shape=(idxs.size, idxs.size))\n', (17453, 17496), False, 'from scipy import sparse\n'), ((3979, 3992), 'numpy.argmax', 'np.argmax', (['ds'], {}), '(ds)\n', (3988, 3992), True, 'import numpy as np\n'), ((4107, 4136), 'numpy.minimum', 'np.minimum', (['ds', 'dperm2all[-1]'], {}), '(ds, dperm2all[-1])\n', (4117, 4136), True, 'import numpy as np\n'), ((10628, 10646), 'scipy.sparse.issparse', 'sparse.issparse', (['X'], {}), '(X)\n', (10643, 10646), False, 'from scipy import sparse\n'), ((11924, 11945), 'scipy.sparse.coo_matrix', 'sparse.coo_matrix', (['dm'], {}), '(dm)\n', (11941, 11945), False, 'from scipy import sparse\n'), ((11986, 12011), 'scipy.sparse.isspmatrix_coo', 'sparse.isspmatrix_coo', (['dm'], {}), '(dm)\n', (12007, 12011), False, 'from scipy import sparse\n'), ((13300, 13337), 'numpy.array', 'np.array', (['dm[I > J]'], {'dtype': 'np.float32'}), '(dm[I > J], dtype=np.float32)\n', (13308, 13337), True, 'import numpy as np\n'), ((16639, 16678), 'numpy.ones', 'np.ones', (['(m + 2, n + 2)'], {'dtype': 'np.int64'}), '((m + 2, n + 2), dtype=np.int64)\n', (16646, 16678), True, 'import numpy as np\n'), ((16727, 16746), 'numpy.ones_like', 'np.ones_like', (['tidxs'], {}), '(tidxs)\n', (16739, 16746), True, 'import numpy as np\n'), ((28390, 28437), 'persim.plot_diagrams', 'persim.plot_diagrams', (['diagrams', '*args'], {}), '(diagrams, *args, **kwargs)\n', (28410, 28437), False, 'import persim\n'), ((2466, 2520), 'sklearn.metrics.pairwise.pairwise_distances', 'pairwise_distances', (['X', 'X[i, :][None, :]'], {'metric': 'metric'}), '(X, X[i, :][None, :], metric=metric)\n', (2484, 2520), False, 'from sklearn.metrics.pairwise import pairwise_distances\n'), ((10181, 10339), 'warnings.warn', 'warnings.warn', (["('The input matrix is square, but the distance_matrix ' +\n 'flag is off. Did you mean to indicate that ' +\n 'this was a distance matrix?')"], {}), "('The input matrix is square, but the distance_matrix ' +\n 'flag is off. 
Did you mean to indicate that ' +\n 'this was a distance matrix?')\n", (10194, 10339), False, 'import warnings\n'), ((11568, 11604), 'sklearn.metrics.pairwise.pairwise_distances', 'pairwise_distances', (['X'], {'metric': 'metric'}), '(X, metric=metric)\n', (11586, 11604), False, 'from sklearn.metrics.pairwise import pairwise_distances\n'), ((11668, 11687), 'scipy.sparse.issparse', 'sparse.issparse', (['dm'], {}), '(dm)\n', (11683, 11687), False, 'from scipy import sparse\n'), ((12245, 12267), 'numpy.lexsort', 'np.lexsort', (['(col, row)'], {}), '((col, row))\n', (12255, 12267), True, 'import numpy as np\n'), ((13241, 13260), 'numpy.arange', 'np.arange', (['n_points'], {}), '(n_points)\n', (13250, 13260), True, 'import numpy as np\n'), ((13262, 13281), 'numpy.arange', 'np.arange', (['n_points'], {}), '(n_points)\n', (13271, 13281), True, 'import numpy as np\n'), ((13378, 13420), 'pyRipser.doRipsFiltrationDMCycles', 'DRFDMCycles', (['DParam', 'maxdim', 'thresh', 'coeff'], {}), '(DParam, maxdim, thresh, coeff)\n', (13389, 13420), True, 'from pyRipser import doRipsFiltrationDMCycles as DRFDMCycles\n'), ((13514, 13563), 'pyRipser.doRipsFiltrationDM', 'DRFDM', (['DParam', 'maxdim', 'thresh', 'coeff', 'do_cocycles'], {}), '(DParam, maxdim, thresh, coeff, do_cocycles)\n', (13519, 13563), True, 'from pyRipser import doRipsFiltrationDM as DRFDM\n'), ((13871, 13890), 'numpy.array', 'np.array', (['dgms[dim]'], {}), '(dgms[dim])\n', (13879, 13890), True, 'import numpy as np\n'), ((16490, 16506), 'numpy.arange', 'np.arange', (['(m * n)'], {}), '(m * n)\n', (16499, 16506), True, 'import numpy as np\n'), ((17054, 17075), 'numpy.maximum', 'np.maximum', (['thisD', 'tD'], {}), '(thisD, tD)\n', (17064, 17075), True, 'import numpy as np\n'), ((10444, 10546), 'warnings.warn', 'warnings.warn', (["('The input point cloud has more columns than rows; ' +\n 'did you mean to transpose?')"], {}), "('The input point cloud has more columns than rows; ' +\n 'did you mean to transpose?')\n", (10457, 10546), False, 'import warnings\n'), ((12721, 12764), 'numpy.array', 'np.array', (['data'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(data, dtype=np.float32, order='C')\n", (12729, 12764), True, 'import numpy as np\n'), ((13048, 13091), 'numpy.array', 'np.array', (['data'], {'dtype': 'np.float32', 'order': '"""C"""'}), "(data, dtype=np.float32, order='C')\n", (13056, 13091), True, 'import numpy as np\n'), ((14645, 14673), 'numpy.array', 'np.array', (["res['dim_0_pairs']"], {}), "(res['dim_0_pairs'])\n", (14653, 14673), True, 'import numpy as np\n'), ((15651, 15676), 'numpy.mod', 'np.mod', (['ccl[:, -1]', 'coeff'], {}), '(ccl[:, -1], coeff)\n', (15657, 15676), True, 'import numpy as np\n'), ((16929, 16955), 'numpy.roll', 'np.roll', (['tidxs', 'di'], {'axis': '(0)'}), '(tidxs, di, axis=0)\n', (16936, 16955), True, 'import numpy as np\n'), ((16997, 17020), 'numpy.roll', 'np.roll', (['tD', 'di'], {'axis': '(0)'}), '(tD, di, axis=0)\n', (17004, 17020), True, 'import numpy as np\n'), ((17136, 17151), 'numpy.isnan', 'np.isnan', (['thisD'], {}), '(thisD)\n', (17144, 17151), True, 'import numpy as np\n'), ((14216, 14245), 'numpy.array', 'np.array', (['ccl'], {'dtype': 'np.int64'}), '(ccl, dtype=np.int64)\n', (14224, 14245), True, 'import numpy as np\n'), ((15577, 15606), 'numpy.array', 'np.array', (['ccl'], {'dtype': 'np.int64'}), '(ccl, dtype=np.int64)\n', (15585, 15606), True, 'import numpy as np\n'), ((14760, 14781), 'numpy.array', 'np.array', (['[0, np.nan]'], {}), '([0, np.nan])\n', (14768, 14781), True, 'import numpy as 
np\n'), ((14850, 14868), 'numpy.array', 'np.array', (['[np.nan]'], {}), '([np.nan])\n', (14858, 14868), True, 'import numpy as np\n')]
|
import warnings
warnings.filterwarnings('ignore')
import CONFIG
import DataLoader
import engine
import NERCRFModel
import os
import sys
import pickle
import torch
import torch.nn as nn
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder
def run():
df = pd.read_csv('input/ner_dataset.csv', encoding='latin-1')
df['Sentence #'] = df['Sentence #'].fillna(method='ffill')
if os.path.exists('input/pos_lb.pickle'):
pos_lb = pickle.load(open('input/pos_lb.pickle', 'rb'))
else:
pos_lb = LabelEncoder().fit(df.POS.values)
pickle.dump(pos_lb, open('input/pos_lb.pickle', 'wb'))
df['POS'] = pos_lb.transform(df.POS.values)
pos_pad_idx = pos_lb.transform(['.'])[0]
sentence = df.groupby('Sentence #')['Word'].apply(list).values
pos = df.groupby('Sentence #')['POS'].apply(list).values
print('-------- [INFO] TOKENIZING --------\n')
data = DataLoader.DataLoader(sentence, pos)
data_len = len(data)
indices = np.arange(0, data_len)
valid_len = int(data_len*CONFIG.Valid_split)
train_index = indices[valid_len:]
valid_index = indices[:valid_len]
train_sampler = torch.utils.data.sampler.SubsetRandomSampler(train_index)
valid_sampler = torch.utils.data.sampler.SubsetRandomSampler(valid_index)
if not os.path.exists('input/word_to_idx.pickle'):
pickle.dump(data.vocab.word_to_idx, open('input/word_to_idx.pickle', 'wb'))
pad_idx = data.vocab.word_to_idx['<PAD>']
train_loader = torch.utils.data.DataLoader(
data,
num_workers=4,
batch_size=CONFIG.BATCH_SIZE,
pin_memory=True,
collate_fn=DataLoader.MyCollate(pad_idx, pos_pad_idx),
sampler=train_sampler
)
valid_loader = torch.utils.data.DataLoader(
data,
num_workers=4,
batch_size=CONFIG.BATCH_SIZE,
pin_memory=True,
collate_fn=DataLoader.MyCollate(pad_idx, pos_pad_idx),
sampler=valid_sampler
)
vocab_size = len(data.vocab.word_to_idx)
num_pos_class = len(list(pos_lb.classes_))
tag_to_idx = {str(x): num for num, x in enumerate(list(pos_lb.classes_))}
tag_to_idx['start_tag'] = num_pos_class
tag_to_idx['stop_tag'] = num_pos_class + 1
if torch.cuda.is_available():
accelarator = 'cuda'
torch.backends.cudnn.benchmark = True
else:
accelarator = 'cpu'
device = torch.device(accelarator)
model = NERCRFModel.NER(
vocab_size=vocab_size,
embed_dims=CONFIG.EMBED_DIMS,
hidden_dims=CONFIG.HIDDEN_DIMS,
num_layers=CONFIG.NUM_HIDDEN_LAYER,
num_classes=len(tag_to_idx),
dropout=CONFIG.DROPOUT,
bidirectional=CONFIG.BIDIRECTIONAL,
tag_to_idx=tag_to_idx,
device=device
)
model = model.to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=CONFIG.LR)
best_loss = 1e32
print('\n-----------[INFO] STARTING TRAINING ----------------\n')
for epoch in range(CONFIG.EPOCHS):
train_loss = engine.train(model, train_loader, optimizer, device)
eval_loss, val_pos_acc = engine.eval(model, valid_loader, device)
print(f'EPOCH -> {epoch+1}/{CONFIG.EPOCHS}')
print(f'TRAIN LOSS = {np.round(train_loss, 5)}')
print(f'VAL LOSS = {np.round(eval_loss, 5)} | VAL POS ACC = {np.round(val_pos_acc*100, 5)}%')
if best_loss > eval_loss:
best_loss = eval_loss
best_model = model.state_dict()
torch.save(best_model, CONFIG.Model_Path)
if __name__ == "__main__":
run()
|
[
"torch.utils.data.sampler.SubsetRandomSampler",
"DataLoader.DataLoader",
"warnings.filterwarnings",
"pandas.read_csv",
"os.path.exists",
"engine.train",
"sklearn.preprocessing.LabelEncoder",
"torch.save",
"engine.eval",
"numpy.arange",
"torch.cuda.is_available",
"DataLoader.MyCollate",
"torch.device",
"numpy.round"
] |
[((16, 49), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (39, 49), False, 'import warnings\n'), ((296, 352), 'pandas.read_csv', 'pd.read_csv', (['"""input/ner_dataset.csv"""'], {'encoding': '"""latin-1"""'}), "('input/ner_dataset.csv', encoding='latin-1')\n", (307, 352), True, 'import pandas as pd\n'), ((429, 466), 'os.path.exists', 'os.path.exists', (['"""input/pos_lb.pickle"""'], {}), "('input/pos_lb.pickle')\n", (443, 466), False, 'import os\n'), ((948, 984), 'DataLoader.DataLoader', 'DataLoader.DataLoader', (['sentence', 'pos'], {}), '(sentence, pos)\n', (969, 984), False, 'import DataLoader\n'), ((1025, 1047), 'numpy.arange', 'np.arange', (['(0)', 'data_len'], {}), '(0, data_len)\n', (1034, 1047), True, 'import numpy as np\n'), ((1195, 1252), 'torch.utils.data.sampler.SubsetRandomSampler', 'torch.utils.data.sampler.SubsetRandomSampler', (['train_index'], {}), '(train_index)\n', (1239, 1252), False, 'import torch\n'), ((1273, 1330), 'torch.utils.data.sampler.SubsetRandomSampler', 'torch.utils.data.sampler.SubsetRandomSampler', (['valid_index'], {}), '(valid_index)\n', (1317, 1330), False, 'import torch\n'), ((2286, 2311), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2309, 2311), False, 'import torch\n'), ((2444, 2469), 'torch.device', 'torch.device', (['accelarator'], {}), '(accelarator)\n', (2456, 2469), False, 'import torch\n'), ((3535, 3576), 'torch.save', 'torch.save', (['best_model', 'CONFIG.Model_Path'], {}), '(best_model, CONFIG.Model_Path)\n', (3545, 3576), False, 'import torch\n'), ((1343, 1385), 'os.path.exists', 'os.path.exists', (['"""input/word_to_idx.pickle"""'], {}), "('input/word_to_idx.pickle')\n", (1357, 1385), False, 'import os\n'), ((3075, 3127), 'engine.train', 'engine.train', (['model', 'train_loader', 'optimizer', 'device'], {}), '(model, train_loader, optimizer, device)\n', (3087, 3127), False, 'import engine\n'), ((3161, 3201), 'engine.eval', 'engine.eval', (['model', 'valid_loader', 'device'], {}), '(model, valid_loader, device)\n', (3172, 3201), False, 'import engine\n'), ((1686, 1728), 'DataLoader.MyCollate', 'DataLoader.MyCollate', (['pad_idx', 'pos_pad_idx'], {}), '(pad_idx, pos_pad_idx)\n', (1706, 1728), False, 'import DataLoader\n'), ((1934, 1976), 'DataLoader.MyCollate', 'DataLoader.MyCollate', (['pad_idx', 'pos_pad_idx'], {}), '(pad_idx, pos_pad_idx)\n', (1954, 1976), False, 'import DataLoader\n'), ((559, 573), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (571, 573), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((3285, 3308), 'numpy.round', 'np.round', (['train_loss', '(5)'], {}), '(train_loss, 5)\n', (3293, 3308), True, 'import numpy as np\n'), ((3342, 3364), 'numpy.round', 'np.round', (['eval_loss', '(5)'], {}), '(eval_loss, 5)\n', (3350, 3364), True, 'import numpy as np\n'), ((3385, 3415), 'numpy.round', 'np.round', (['(val_pos_acc * 100)', '(5)'], {}), '(val_pos_acc * 100, 5)\n', (3393, 3415), True, 'import numpy as np\n')]
|
#Main program for Project 1 - Algorithms class
import tkinter as tk
from tkinter import *
import csv
import time
import sys
import pandas as pd
import numpy as np
import subprocess
#Application class definition (Tk frame for graphics.)
class Application(tk.Frame):
#Initialization of the class and subsequently GUI.
def __init__(self, master=None):
super().__init__(master)
self.master = master
self.pack()
self.create_window()
#Window creation method.
def create_window(self):
self.window = tk
#Sort dataset for selection in the GUI
sorts = [("Selection Sort","selectionSort"), ("Bubble Sort","bubbleSort"), ("Insertion Sort","insertionSort"), ("Merge Sort","mergeSort"), ("Quick Sort","quickSort")]
#File dataset for selection in the GUI. Files generated by the PRNG are saved as CSV for easier parsing.
files = [(5000, "5000.csv"), (25000, "25000.csv"), (80000, "80000.csv"), (150000, "150000.csv"), (300000, "300000.csv")]
#Setting vars for selections
v = StringVar()
v.set("selectionSort")
f = StringVar()
f.set("5000.csv")
r = IntVar()
r.set(1)
count = 0
self.label = tk.Label(self)
self.label["text"] = "Select sorting algorithm:"
self.label.pack(side=TOP)
#Creation of radiobuttons
for item, method in sorts:
b = Radiobutton(self, text=item,
variable=v, value=method)
b.pack(anchor=W, side=TOP)
self.label1 = tk.Label(self)
self.label1["text"] = "Select input size:"
self.label1.pack(side=TOP)
#Creation of radiobuttons
for item, n in files:
b = Radiobutton(self, text=item,
variable=f, value=n)
b.pack(anchor=W, side=TOP)
count +=1
self.button = tk.Button(self)
self.label2 = tk.Label(self)
self.label2["text"] = "Number of runs: "
self.label2.pack(side=TOP)
self.runs = tk.Spinbox(self, from_=1, to=10, textvariable=r)
self.runs.pack(side=TOP)
self.button1 = tk.Button(self)
self.button1["text"] = "Start"
self.button1["command"] = lambda: route(self,v.get(),f.get(), r.get())
self.button1.pack(side=TOP)
#Allow to quit.
self.quit = tk.Button(self, text="QUIT", fg="red", command=self.master.destroy)
self.quit.pack(side=BOTTOM)
#Main router method to call the algorithm to use.
def route(self, algorithm, file, runs):
fileL = []
#Open file and parse info.
with open('{}'.format(file)) as csv_file:
reader = csv.reader(csv_file, delimiter=',')
fileL = list(reader)
fileLi = fileL[0]
#Convert the info to Int from String
fileList = [int(x) for x in fileLi]
##
## Selection of algorithm with timer start and end.
## Timer starts before method is called and ends after method returns valid output.
##
if algorithm == "selectionSort":
times = []
fil = fileList[:]
start = time.time()
selectionSort(self,fil)
end = time.time()
print(end-start)
for i in range(0, runs):
#COPY BY VALUE NOT REFERENCE
files = fileList[:]
start = time.time()
selectionSort(self,files)
end = time.time()
times.append(end-start)
for t in times:
print("Selection Sort {}, time elapsed: {} s".format(file, t))
return times
elif algorithm == "bubbleSort":
times = []
fil = fileList[:]
start = time.time()
bubbleSort(self,fil)
end = time.time()
print(end-start)
for i in range(0, runs):
#COPY BY VALUE NOT REFERENCE
files = np.array(fileList[:])
start = time.time()
bubbleSort(self,files)
end = time.time()
times.append(end-start)
for t in times:
print("Bubble Sort {}, time elapsed: {} s".format(file, t))
return times
elif algorithm == "insertionSort":
times = []
fil = fileList[:]
start = time.time()
insertionSort(self,fil)
end = time.time()
print(end-start)
for i in range(0, runs):
#COPY BY VALUE NOT REFERENCE
files = np.array(fileList[:])
start = time.time()
insertionSort(self,files)
end = time.time()
times.append(end-start)
for t in times:
print("Insertion Sort {}, time elapsed: {} s".format(file, t))
return times
elif algorithm == "mergeSort":
times = []
for i in range(0, runs):
#COPY BY VALUE NOT REFERENCE
files = np.array(fileList[:])
start = time.time()
mergeSort(self,files)
end = time.time()
times.append(end-start)
for t in times:
print("Merge Sort {}, time elapsed: {} s".format(file, t))
return times
elif algorithm == "quickSort":
times = []
for i in range(0, runs):
#COPY BY VALUE NOT REFERENCE
files = fileList[:]
start = time.time()
quickSort(self, files, 0, len(files)-1)
end = time.time()
times.append(end-start)
for t in times:
print("Quick Sort {}, time elapsed: {} s".format(file, t))
return times
'''
SelectionSort Implementation
O(n^2) - In Place
'''
def selectionSort(self, array):
n = len(array)
#Loop through array and subarray.
for i in range(0,n-1):
min = i
for j in range(i+1, n):
#Comparison
if array[j] < array[min]:
min = j
#Swap in place.
array[i], array[min] = array[min], array[i]
return array
'''
BubbleSort
O(N^2) - In Place
'''
def bubbleSort(self, array):
n = len(array)
## GO through all elements
for i in range(0, n-1):
for j in range(0, n-i-1):
#swap
if array[j] > array[j+1]:
array[j], array[j+1] = array[j+1], array[j]
return array
'''
InsertionSort
O(N^2) - In Place
'''
def insertionSort(self, array):
n = len(array)
for i in range(1, n):
current = array[i]
j = i-1
while j >= 0 and current < array[j]:
array[j+1] = array[j]
j -= 1
array[j+1] = current
return array
'''
MergeSort
O(nlogn) - Division of arrays
'''
def mergeSort(self, arr):
if len(arr) >1:
mid = len(arr)//2 # Finding the mid of the array
L = arr[:mid] # Dividing the array elements
R = arr[mid:] # into 2 halves
mergeSort(self, L) # Sorting the first half
mergeSort(self, R) # Sorting the second half
i = j = k = 0
# Copy data to temp arrays L[] and R[]
while i < len(L) and j < len(R):
if L[i] < R[j]:
arr[k] = L[i]
i+= 1
else:
arr[k] = R[j]
j+= 1
k+= 1
# Checking if any element was left
while i < len(L):
arr[k] = L[i]
i+= 1
k+= 1
while j < len(R):
arr[k] = R[j]
j+= 1
k+= 1
return arr
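# Illustrative trace (added for clarity, not part of the original assignment code):
# mergeSort on [38, 27, 43, 3] splits it into [38, 27] and [43, 3], sorts each half
# recursively to [27, 38] and [3, 43], then merges the halves back in place to give
# [3, 27, 38, 43].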
'''
QUICKSORT
O(nlogn) - In Place
'''
def partition(array, start, end):
pivot = array[start]
low = start + 1
high = end
while True:
# If the current value we're looking at is larger than the pivot
# it's in the right place (right side of pivot) and we can move left,
# to the next element.
# We also need to make sure we haven't surpassed the low pointer, since that
# indicates we have already moved all the elements to their correct side of the pivot
while low <= high and array[high] >= pivot:
high = high - 1
# Opposite process of the one above
while low <= high and array[low] <= pivot:
low = low + 1
# We either found a value for both high and low that is out of order
# or low is higher than high, in which case we exit the loop
if low <= high:
array[low], array[high] = array[high], array[low]
# The loop continues
else:
# We exit out of the loop
break
array[start], array[high] = array[high], array[start]
return high
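# Illustrative trace (added for clarity, not part of the original assignment code):
# partition([3, 1, 2], start=0, end=2) picks pivot=3; low walks right past 1 and 2
# (both <= 3) while high stays at index 2, the loop exits with low > high, the pivot
# is swapped to index 2, and the function returns 2 with the list now [2, 1, 3].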
def quickSort(self, array, start, end):
sys.setrecursionlimit(len(array)+30000)
if start >= end:
return
p = partition(array, start, end)
quickSort(self, array, start, p-1)
quickSort(self, array, p+1, end)
return array
root = tk.Tk()
app = Application(master=root)
app.mainloop()
|
[
"tkinter.Spinbox",
"csv.reader",
"tkinter.Button",
"time.time",
"numpy.array",
"tkinter.Label",
"tkinter.Tk"
] |
[((10819, 10826), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (10824, 10826), True, 'import tkinter as tk\n'), ((1262, 1276), 'tkinter.Label', 'tk.Label', (['self'], {}), '(self)\n', (1270, 1276), True, 'import tkinter as tk\n'), ((1595, 1609), 'tkinter.Label', 'tk.Label', (['self'], {}), '(self)\n', (1603, 1609), True, 'import tkinter as tk\n'), ((1989, 2003), 'tkinter.Label', 'tk.Label', (['self'], {}), '(self)\n', (1997, 2003), True, 'import tkinter as tk\n'), ((2109, 2157), 'tkinter.Spinbox', 'tk.Spinbox', (['self'], {'from_': '(1)', 'to': '(10)', 'textvariable': 'r'}), '(self, from_=1, to=10, textvariable=r)\n', (2119, 2157), True, 'import tkinter as tk\n'), ((2215, 2230), 'tkinter.Button', 'tk.Button', (['self'], {}), '(self)\n', (2224, 2230), True, 'import tkinter as tk\n'), ((2430, 2497), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""QUIT"""', 'fg': '"""red"""', 'command': 'self.master.destroy'}), "(self, text='QUIT', fg='red', command=self.master.destroy)\n", (2439, 2497), True, 'import tkinter as tk\n'), ((1938, 1953), 'tkinter.Button', 'tk.Button', (['self'], {}), '(self)\n', (1947, 1953), True, 'import tkinter as tk\n'), ((2784, 2819), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""'}), "(csv_file, delimiter=',')\n", (2794, 2819), False, 'import csv\n'), ((3332, 3343), 'time.time', 'time.time', ([], {}), '()\n', (3341, 3343), False, 'import time\n'), ((3406, 3417), 'time.time', 'time.time', ([], {}), '()\n', (3415, 3417), False, 'import time\n'), ((3626, 3637), 'time.time', 'time.time', ([], {}), '()\n', (3635, 3637), False, 'import time\n'), ((3710, 3721), 'time.time', 'time.time', ([], {}), '()\n', (3719, 3721), False, 'import time\n'), ((4074, 4085), 'time.time', 'time.time', ([], {}), '()\n', (4083, 4085), False, 'import time\n'), ((4145, 4156), 'time.time', 'time.time', ([], {}), '()\n', (4154, 4156), False, 'import time\n'), ((4321, 4342), 'numpy.array', 'np.array', (['fileList[:]'], {}), '(fileList[:])\n', (4329, 4342), True, 'import numpy as np\n'), ((4371, 4382), 'time.time', 'time.time', ([], {}), '()\n', (4380, 4382), False, 'import time\n'), ((4452, 4463), 'time.time', 'time.time', ([], {}), '()\n', (4461, 4463), False, 'import time\n'), ((4833, 4844), 'time.time', 'time.time', ([], {}), '()\n', (4842, 4844), False, 'import time\n'), ((4907, 4918), 'time.time', 'time.time', ([], {}), '()\n', (4916, 4918), False, 'import time\n'), ((5071, 5092), 'numpy.array', 'np.array', (['fileList[:]'], {}), '(fileList[:])\n', (5079, 5092), True, 'import numpy as np\n'), ((5121, 5132), 'time.time', 'time.time', ([], {}), '()\n', (5130, 5132), False, 'import time\n'), ((5205, 5216), 'time.time', 'time.time', ([], {}), '()\n', (5214, 5216), False, 'import time\n'), ((5613, 5634), 'numpy.array', 'np.array', (['fileList[:]'], {}), '(fileList[:])\n', (5621, 5634), True, 'import numpy as np\n'), ((5663, 5674), 'time.time', 'time.time', ([], {}), '()\n', (5672, 5674), False, 'import time\n'), ((5743, 5754), 'time.time', 'time.time', ([], {}), '()\n', (5752, 5754), False, 'import time\n'), ((6194, 6205), 'time.time', 'time.time', ([], {}), '()\n', (6203, 6205), False, 'import time\n'), ((6292, 6303), 'time.time', 'time.time', ([], {}), '()\n', (6301, 6303), False, 'import time\n')]
|
from __future__ import print_function
import gc
from timeit import default_timer
import matplotlib.pyplot as plt
from numpy import log2
from funcstructs.structures import *
from funcstructs.combinat import productrange
from funcstructs.prototypes.integer_partitions import fixed_lex_partitions
class Stopwatch(object):
"""A timer as a context manager.
Wraps around a timer. A custom timer can be passed to the
constructor. The default timer is timeit.default_timer. The garbage
collector is disabled during timings by default.
    Laps can be recorded using the "lap" method. When first called,
it records the total elapsed time. Subsequent calls append time
since the previous lap.
Usage:
    >>> with Stopwatch(factor=1000) as t:
... # do some things
... print t.elapsed
    ...     # do other things
... t.lap()
... # do yet more
... t.lap()
...
10.122 # in ms
>>> print t.laps, t.elapsed
(20.567, 5.136), 25.703
"""
def __init__(self, gcoff=True, timer=default_timer, factor=1):
self._timer = timer
self._factor = factor
self._end = None
self._lap = 0
self._laps = []
self._gcoff = gcoff
def __call__(self):
"""Return the current time"""
return self._timer()
def __enter__(self):
"""Set the start time."""
if self._gcoff:
gc.disable()
self._start = self()
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Set the end time."""
self._end = self()
gc.enable()
def __str__(self):
return '%.3f' % self.elapsed
@property
def elapsed(self):
"""Return the current elapsed time since start
If the `elapsed` property is called in the context manager scope,
        the elapsed time between start and property access is returned.
        However, if it is accessed outside of the context manager scope,
        it returns the elapsed time between entering and exiting the scope.
The `elapsed` property can thus be accessed at different points within
the context manager scope, to time different parts of the block.
"""
if self._end is None:
# if elapsed is called in the context manager scope
return (self() - self._start) * self._factor
else:
# if elapsed is called out of the context manager scope
return (self._end - self._start) * self._factor
def lap(self):
"""Mark start of new lap."""
if self._end is None:
self._laps.append(self.elapsed - self._lap)
self._lap = self.elapsed
else:
raise RuntimeError("Stopwatch not running")
@property
def laps(self):
"""Return laps recorded during the timing."""
return tuple(self._laps)
def reset(self):
"""Reset the timer and all laps."""
if self._end is None:
self._lap = 0
self._laps = []
self._start = self()
else:
raise RuntimeError("Cannot reset finished Timer")
def _call_string(*args, **kwargs):
"""Return format of args and kwargs as viewed when typed out"""
arg_strings = []
for arg in args:
arg_strings.append(repr(arg))
for key, arg in kwargs.items():
arg_strings.append(key+'='+repr(arg))
return "(%s)" % ', '.join(arg_strings)
def _object_name(ob):
"""Return name of ob."""
try:
return ob.__name__
except AttributeError:
return ob.__class__.__name__
def iteration_time(gen, *args, **kwargs):
"""Time to exhaust gen. If gen is callable, time gen(*args, **kwargs)."""
printing = kwargs.pop('printing', True)
call_sig = _object_name(gen)
if callable(gen):
gen = gen(*args, **kwargs)
call_sig += _call_string(*args, **kwargs)
with Stopwatch() as t:
for i, el in enumerate(gen, start=1):
pass
if printing:
print("Enumerated %s items from %s in %s seconds" % (i, call_sig, t))
return t.elapsed
def maptime(gen, mapfunc, setupfunc=None):
"""Array of times to apply mapfuc to each element in gen."""
gen = list(map((lambda el: el) if setupfunc is None else setupfunc, gen))
with Stopwatch() as t:
for el in gen:
mapfunc(el)
t.lap()
return t.laps
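# A minimal usage sketch (added for illustration; the sizes and the len/list calls
# below are hypothetical examples, not part of the original module):
#
#     sizes = [10 ** k for k in range(1, 4)]
#     laps = maptime(sizes, len, setupfunc=lambda n: list(range(n)))
#     # `laps` is a tuple with one lap time per input built by the setup function.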
def _map_setup_call_sig(mapfunc, setupfunc):
"""Call string for callable and """
call_sig = _object_name(mapfunc) + "(%s)"
if setupfunc is not None:
call_sig %= _object_name(setupfunc) + "(%s)"
return call_sig
def mapbench(gen, mapfunc, *setupfuncs):
"""Comparison plot of mapfuncs applied to gen. A mapfunc may either be a
callable or a tuple-pair of callables, one being the mapping, and the other
being the setup function."""
gen = list(gen) # in case generator is consumed
setupfuncs = list(setupfuncs)
sigs = []
if mapfunc is not None:
if not setupfuncs:
setupfuncs.append(None)
for setupfunc in setupfuncs:
sigs.append((mapfunc, setupfunc))
else:
for mapfunc in setupfuncs:
sigs.append((mapfunc, None))
for mapfunc, setupfunc in sigs:
time = maptime(gen, mapfunc, setupfunc)
sig = _map_setup_call_sig(mapfunc, setupfunc) % 'x'
plt.plot(gen, time, label=sig)
plt.legend(loc='upper left')
print("benched %s" % sig)
plt.xlabel('x')
if __name__ == '__main__':
# Tree with all non-root nodes connected directly to root
def flattree(n): return rangefunc([0]*n)
# Endofunction with f[0] == 0 and f[n] == n-1 for n in f.domain
def talltree(n): return rangefunc([0] + list(range(n)))
# Cyclic permutation of range(n)
def bigcycle(n): return rangefunc(list(range(1, n)) + [0])
def balanced_binary_tree(n):
"""Produce a balanced binary tree of height n."""
h = int(log2(n))
tree = [h]
while h:
h -= 1
tree *= 2
tree = [h] + tree
return rangefunc(LevelSequence(tree).parents())
def scattered_tree(n):
"""Tree with a big cycle and things attached"""
return Endofunction(list(bigcycle(n//2))+list(talltree(n-n//2)))
iteration_time(fixed_lex_partitions(100, 40))
iteration_time(Funcstructs(12))
iteration_time(Funcstructs, 12)
mapbench(range(1, 2000), Endofunction.cycles, flattree)
mapbench(range(20, 2000), Endofunction.cycles, randfunc)
mapbench(range(20, 2000), periodicity,
lambda f: list(randfunc(f).values()))
plt.figure()
mapbench(
range(20, 2500), Endofunction.cycles,
randfunc,
identity,
randperm,
talltree,
balanced_binary_tree,
bigcycle,
scattered_tree
)
plt.show()
|
[
"matplotlib.pyplot.show",
"gc.disable",
"matplotlib.pyplot.plot",
"numpy.log2",
"matplotlib.pyplot.legend",
"funcstructs.prototypes.integer_partitions.fixed_lex_partitions",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.xlabel",
"gc.enable"
] |
[((5534, 5549), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (5544, 5549), True, 'import matplotlib.pyplot as plt\n'), ((6696, 6708), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6706, 6708), True, 'import matplotlib.pyplot as plt\n'), ((6922, 6932), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6930, 6932), True, 'import matplotlib.pyplot as plt\n'), ((1620, 1631), 'gc.enable', 'gc.enable', ([], {}), '()\n', (1629, 1631), False, 'import gc\n'), ((5428, 5458), 'matplotlib.pyplot.plot', 'plt.plot', (['gen', 'time'], {'label': 'sig'}), '(gen, time, label=sig)\n', (5436, 5458), True, 'import matplotlib.pyplot as plt\n'), ((5467, 5495), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""'}), "(loc='upper left')\n", (5477, 5495), True, 'import matplotlib.pyplot as plt\n'), ((6373, 6402), 'funcstructs.prototypes.integer_partitions.fixed_lex_partitions', 'fixed_lex_partitions', (['(100)', '(40)'], {}), '(100, 40)\n', (6393, 6402), False, 'from funcstructs.prototypes.integer_partitions import fixed_lex_partitions\n'), ((1430, 1442), 'gc.disable', 'gc.disable', ([], {}), '()\n', (1440, 1442), False, 'import gc\n'), ((6024, 6031), 'numpy.log2', 'log2', (['n'], {}), '(n)\n', (6028, 6031), False, 'from numpy import log2\n')]
|
import numpy as np
import cv2
from cv_utils import add_text
## Convert COCO keypoints to PoseTrack keypoints
def conver_coco_poseTrack(kps):
"""
input: N, 17, 3
kp_names = [0:'nose', 1:'l_eye', 2:'r_eye', 3:'l_ear', 4:'r_ear', 5:'l_shoulder',
6:'r_shoulder', 7:'l_elbow', 8:'r_elbow', 9:'l_wrist', 10:'r_wrist',
11:'l_hip', 12:'r_hip', 13:'l_knee', 14:'r_knee', 15:'l_ankle', 16:'r_ankle']
output: N, 15, 3
    {0: 'right_ankle', 1: 'right_knee', 2: 'right_hip', 3: 'left_hip', 4: 'left_knee', 5: 'left_ankle', 6: 'right_wrist', 7: 'right_elbow', 8: 'right_shoulder', 9: 'left_shoulder', 10: 'left_elbow', 11: 'left_wrist', 12: 'head_bottom', 13: 'nose', 14: 'head_top'}
"""
MAP ={16:0, 14:1, 12:2, 11:3, 13:4, 15:5, 10:6, 8:7, 6:8, 5:9, 7:10, 9:11, 0:13}
pose_track_kps = np.zeros((kps.shape[0], 15, 3))
for k in range(kps.shape[0]):
pivel = (kps[k][5] + kps[k][6])/2
head_bottom = (pivel+kps[k][0])/2 # 12
head_top = 2*kps[k][0] - head_bottom # 14
        head_top[0] = (head_bottom[0] + kps[k][0][0])/2 # change x
for coco_i, pt_j in MAP.items():
pose_track_kps[k][pt_j] = kps[k][coco_i]
pose_track_kps[k][12] = head_bottom
pose_track_kps[k][14] = head_top
return pose_track_kps
def draw_bbox(img, bbox, text=None, change_color=None):
x1, y1, w, h = bbox
pt1 = (int(x1), int(y1))
pt2 = (int(x1+w), int(y1+h))
if change_color:
img = cv2.rectangle(img, pt1, pt2, (2,0,2), 3, 2)
else:
img = cv2.rectangle(img, pt1, pt2, (255,0,255), 3, 2)
if text:
img = add_text(img, str(text), pt1, 2)
return img
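# A minimal sanity-check sketch (added for illustration, not part of the original
# module): a dummy (1, 17, 3) COCO keypoint array should come back from the
# converter as a (1, 15, 3) PoseTrack-style array.
if __name__ == "__main__":
    dummy_kps = np.zeros((1, 17, 3))
    converted = conver_coco_poseTrack(dummy_kps)
    print(converted.shape) # expected: (1, 15, 3)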
|
[
"numpy.zeros",
"cv2.rectangle"
] |
[((864, 895), 'numpy.zeros', 'np.zeros', (['(kps.shape[0], 15, 3)'], {}), '((kps.shape[0], 15, 3))\n', (872, 895), True, 'import numpy as np\n'), ((1516, 1561), 'cv2.rectangle', 'cv2.rectangle', (['img', 'pt1', 'pt2', '(2, 0, 2)', '(3)', '(2)'], {}), '(img, pt1, pt2, (2, 0, 2), 3, 2)\n', (1529, 1561), False, 'import cv2\n'), ((1584, 1633), 'cv2.rectangle', 'cv2.rectangle', (['img', 'pt1', 'pt2', '(255, 0, 255)', '(3)', '(2)'], {}), '(img, pt1, pt2, (255, 0, 255), 3, 2)\n', (1597, 1633), False, 'import cv2\n')]
|
from numpy import array
def scigrid_2011_01_04_20():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[589, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[595, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[597, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[601, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[603, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[609, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[610, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[616, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[617, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[618, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[621, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[632, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[637, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[639, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[640, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[650, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[657, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[663, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[666, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[668, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[676, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[677, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[679, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[683, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[691, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[694, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[695, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[697, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[698, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[699, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[700, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[702, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[704, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[711, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[716, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[717, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[721, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[726, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[727, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[728, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[730, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[731, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[732, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[736, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[737, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[741, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[742, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[746, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[747, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[750, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[758, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[760, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[761, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[762, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[763, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[765, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[767, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[771, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[774, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[775, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[776, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[779, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[781, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[786, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[788, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[791, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[792, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[793, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[794, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[796, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[800, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[801, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[808, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[809, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[816, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[818, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[821, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[822, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[825, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[833, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[834, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[835, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[839, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[843, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[844, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[848, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[849, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[850, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[856, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[857, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[859, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[860, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[862, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[864, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[865, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[866, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[869, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[870, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[872, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[873, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[874, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[876, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[877, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[881, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[883, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[885, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[886, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[888, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[889, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[895, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[897, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[899, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[902, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[903, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[905, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[906, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[907, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[909, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[913, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[915, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[918, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[923, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[924, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[928, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[931, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[936, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[937, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[939, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[952, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[956, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[957, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[959, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[960, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[966, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[967, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[968, 2, 0, 0, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1, 0.9 ],
[969, 2, 0, 0, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1, 0.9 ],
[971, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[972, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[975, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[977, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[983, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[986, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[987, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[988, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[990, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[993, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[994, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[995, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[998, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[999, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1003, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1010, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1011, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1018, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1019, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1023, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1027, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1028, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1031, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1032, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1035, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1036, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1039, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1041, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1046, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1047, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1050, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1051, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1054, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1055, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1058, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1059, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1062, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1063, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1066, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1067, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1074, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1077, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1080, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1081, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1083, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1084, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1085, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1088, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1089, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1092, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1093, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1096, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1097, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1103, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1104, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1107, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1108, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1111, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1112, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1114, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1115, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1116, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1119, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1120, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1123, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1124, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1127, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1128, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1131, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1132, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1135, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1136, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1137, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1139, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1140, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1143, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1144, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1147, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1148, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1151, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1152, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1155, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1156, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1159, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1160, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1161, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1163, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1165, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1166, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1167, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1170, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1173, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1174, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1177, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1178, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1181, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1182, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1185, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1186, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1189, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1190, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1197, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1198, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1203, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1213, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1214, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1217, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1218, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1220, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1221, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1222, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1226, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1228, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1230, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1231, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1232, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1237, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1238, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1241, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1242, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1243, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1245, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1246, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1247, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1249, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1250, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1251, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1253, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1254, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1255, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1256, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1257, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1258, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1259, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1260, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1261, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1267, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1275, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1276, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1277, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1282, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1288, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1289, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1290, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1292, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1293, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1294, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1295, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1300, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1302, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1303, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1306, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1307, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1308, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1312, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1319, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1323, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1326, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1327, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1328, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1331, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1336, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1337, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1339, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1340, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1348, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1349, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1356, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1359, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1360, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1362, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1363, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1364, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1366, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1372, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1374, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1375, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1376, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1378, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1379, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1380, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1382, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1383, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1384, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1386, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1387, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1388, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1390, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1391, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1392, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1394, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1395, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1396, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1398, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1399, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1400, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1402, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1403, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1404, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1406, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1407, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1408, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1410, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1411, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1412, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1414, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1418, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1419, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1421, 2, 0, 0, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1, 0.9 ],
[1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1423, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1424, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1425, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1426, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1427, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1428, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1431, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1432, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1433, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1434, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1435, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1436, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1437, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1438, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1439, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1440, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1443, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1446, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1447, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1448, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1449, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1450, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1451, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1452, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1453, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1454, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1455, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1456, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1457, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1458, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1459, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1460, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1461, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1462, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1463, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1464, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1465, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1466, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1467, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1468, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1469, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1470, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1471, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1472, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1473, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1474, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1475, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1476, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1477, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1482, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1483, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1484, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1485, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1486, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1489, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1490, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1491, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1492, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1493, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1494, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1495, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1500, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1501, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1503, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1504, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1512, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1513, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1518, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1519, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1, 1, 313.588363, 62.717673, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2, 1, 0, 0, 0, 0, 0, 1.000014, 0, 380.0, 0, 1.1, 0.9 ],
[3, 1, 54.963605, 10.992721, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[4, 1, 90.389472, 18.077894, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[5, 1, 0, 0, 0, 0, 0, 0.99988, 0, 380.0, 0, 1.1, 0.9 ],
[6, 1, 265.420957, 53.084191, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[7, 1, 200.027697, 40.005539, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[8, 1, 167.368885, 33.473777, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[9, 1, 113.188962, 22.637792, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[10, 1, 0, 0, 0, 0, 0, 0.99817, 0, 380.0, 0, 1.1, 0.9 ],
[11, 1, 99.172825, 19.834565, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[12, 1, 0, 0, 0, 0, 0, 1.000621, 0, 380.0, 0, 1.1, 0.9 ],
[13, 1, 0, 0, 0, 0, 0, 1.000047, 0, 380.0, 0, 1.1, 0.9 ],
[14, 1, 237.185017, 47.437003, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[15, 1, 0, 0, 0, 0, 0, 1.000288, 0, 380.0, 0, 1.1, 0.9 ],
[16, 1, 404.510393, 80.902079, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[17, 1, 95.272824, 19.054565, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[18, 1, 0, 0, 0, 0, 0, 1.000595, 0, 380.0, 0, 1.1, 0.9 ],
[19, 1, 235.383233, 47.076647, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[20, 1, 0, 0, 0, 0, 0, 0.996189, 0, 380.0, 0, 1.1, 0.9 ],
[21, 1, 1012.184007, 202.436801, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[22, 1, 0, 0, 0, 0, 0, 0.999173, 0, 380.0, 0, 1.1, 0.9 ],
[23, 1, 132.529206, 26.505841, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[24, 1, 0, 0, 0, 0, 0, 0.999969, 0, 380.0, 0, 1.1, 0.9 ],
[25, 1, 63.389643, 12.677929, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[26, 1, 0, 0, 0, 0, 0, 1.000136, 0, 380.0, 0, 1.1, 0.9 ],
[27, 1, 77.812541, 15.562508, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[28, 1, 229.912748, 45.98255, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[29, 1, 84.451739, 16.890348, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[30, 1, 0, 0, 0, 0, 0, 0.998963, 0, 380.0, 0, 1.1, 0.9 ],
[31, 1, 166.198842, 33.239768, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[32, 1, 0, 0, 0, 0, 0, 0.999676, 0, 380.0, 0, 1.1, 0.9 ],
[33, 1, 208.382115, 41.676423, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[34, 1, 41.342034, 8.268407, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[35, 1, 2.737061, 0.547412, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[36, 1, 9.062016, 1.812403, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[37, 1, 0, 0, 0, 0, 0, 1.002866, 0, 380.0, 0, 1.1, 0.9 ],
[38, 1, 218.324197, 43.664839, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[39, 1, 71.489926, 14.297985, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[40, 1, 74.673464, 14.934693, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[41, 1, 80.257049, 16.05141, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[42, 1, 0, 0, 0, 0, 0, 1.000642, 0, 380.0, 0, 1.1, 0.9 ],
[43, 1, 123.077802, 24.61556, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[44, 1, 157.459799, 31.49196, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[45, 1, 83.583183, 16.716637, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[46, 1, 0, 0, 0, 0, 0, 1.000156, 0, 380.0, 0, 1.1, 0.9 ],
[47, 1, 363.426388, 72.685278, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[48, 1, 249.807245, 49.961449, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[49, 1, 63.18863, 12.637726, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[50, 1, 92.011174, 18.402235, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[51, 1, 119.240475, 23.848095, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[52, 1, 0, 0, 0, 0, 0, 1.000169, 0, 380.0, 0, 1.1, 0.9 ],
[53, 1, 180.928324, 36.185665, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[54, 1, 91.922123, 18.384425, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[55, 1, 90.148739, 18.029748, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[56, 1, 0, 0, 0, 0, 0, 0.999632, 0, 380.0, 0, 1.1, 0.9 ],
[57, 1, 107.609434, 21.521887, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[58, 1, 246.495776, 49.299155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[59, 1, 70.4007, 14.08014, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[60, 1, 37.117202, 7.42344, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[61, 1, 0, 0, 0, 0, 0, 0.999641, 0, 380.0, 0, 1.1, 0.9 ],
[62, 1, 282.97336, 56.594672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[63, 1, 167.036752, 33.40735, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[64, 1, 1772.598444, 354.519689, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[65, 1, 5.906328, 1.181266, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[66, 1, 187.401044, 37.480209, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[67, 1, 402.006808, 80.401362, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[68, 1, 0, 0, 0, 0, 0, 0.998739, 0, 380.0, 0, 1.1, 0.9 ],
[69, 1, 0, 0, 0, 0, 0, 0.99974, 0, 380.0, 0, 1.1, 0.9 ],
[70, 1, 760.505189, 152.101038, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[71, 1, 176.73161, 35.346322, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[72, 1, 289.462126, 57.892425, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[73, 1, 92.667734, 18.533547, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[74, 1, 0, 0, 0, 0, 0, 1.001013, 0, 380.0, 0, 1.1, 0.9 ],
[75, 1, 115.496612, 23.099322, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[76, 1, 111.479571, 22.295914, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[77, 1, 107.975583, 21.595117, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[78, 1, 0, 0, 0, 0, 0, 0.998303, 0, 380.0, 0, 1.1, 0.9 ],
[79, 1, 111.493111, 22.298622, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[80, 1, 118.422887, 23.684577, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[81, 1, 133.683311, 26.736662, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[82, 1, 4.449059, 0.889812, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[83, 1, 297.674862, 59.534972, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[84, 1, 29.304254, 5.860851, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[85, 1, 101.621462, 20.324292, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[86, 1, 0, 0, 0, 0, 0, 0.999961, 0, 380.0, 0, 1.1, 0.9 ],
[87, 1, 0, 0, 0, 0, 0, 0.999704, 0, 380.0, 0, 1.1, 0.9 ],
[88, 1, 82.021988, 16.404398, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[89, 1, 101.760831, 20.352166, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[90, 1, 117.529266, 23.505853, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[91, 1, 40.823815, 8.164763, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[92, 1, 44.553104, 8.910621, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[93, 1, 43.697669, 8.739534, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[94, 1, 0, 0, 0, 0, 0, 1.001239, 0, 380.0, 0, 1.1, 0.9 ],
[95, 1, 0, 0, 0, 0, 0, 1.00079, 0, 380.0, 0, 1.1, 0.9 ],
[96, 1, 0, 0, 0, 0, 0, 0.999999, 0, 380.0, 0, 1.1, 0.9 ],
[97, 1, 6.145751, 1.22915, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[98, 1, 112.995638, 22.599128, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[99, 1, 0, 0, 0, 0, 0, 1.000517, 0, 380.0, 0, 1.1, 0.9 ],
[100, 1, 0, 0, 0, 0, 0, 1.002008, 0, 380.0, 0, 1.1, 0.9 ],
[101, 1, 80.012435, 16.002487, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[102, 1, 154.867797, 30.973559, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[103, 1, 181.070422, 36.214084, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[104, 1, 0, 0, 0, 0, 0, 1.00004, 0, 380.0, 0, 1.1, 0.9 ],
[105, 1, 0, 0, 0, 0, 0, 1.000015, 0, 380.0, 0, 1.1, 0.9 ],
[106, 1, 0, 0, 0, 0, 0, 0.999888, 0, 380.0, 0, 1.1, 0.9 ],
[107, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9 ],
[108, 1, 127.723107, 25.544621, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[109, 1, 51.712907, 10.342581, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[110, 1, 67.125426, 13.425085, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[111, 1, 118.293138, 23.658628, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[112, 1, 59.871239, 11.974248, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[113, 1, 94.378762, 18.875752, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[114, 1, 138.996837, 27.799367, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[115, 1, 89.603089, 17.920618, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[116, 1, 149.938448, 29.98769, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[117, 1, 0, 0, 0, 0, 0, 1.000126, 0, 380.0, 0, 1.1, 0.9 ],
[118, 1, 232.158232, 46.431646, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[119, 1, 45.001799, 9.00036, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[120, 1, 0, 0, 0, 0, 0, 1.000899, 0, 380.0, 0, 1.1, 0.9 ],
[121, 1, 61.112463, 12.222493, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[122, 1, 53.503341, 10.700668, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[123, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, 1.1, 0.9 ],
[124, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ],
[125, 1, 0, 0, 0, 0, 0, 0.999433, 0, 380.0, 0, 1.1, 0.9 ],
[126, 1, 280.519344, 56.103869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[127, 1, 216.870964, 43.374193, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[128, 1, 0, 0, 0, 0, 0, 1.000728, 0, 380.0, 0, 1.1, 0.9 ],
[129, 1, 0, 0, 0, 0, 0, 0.999992, 0, 380.0, 0, 1.1, 0.9 ],
[130, 1, 299.025609, 59.805122, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[131, 1, 66.024596, 13.204919, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[132, 1, 171.918065, 34.383613, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[133, 1, 57.585817, 11.517163, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[134, 1, 57.350004, 11.470001, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[135, 1, 57.42604, 11.485208, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[136, 1, 55.6303, 11.12606, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[137, 1, 44.499119, 8.899824, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[138, 1, 0, 0, 0, 0, 0, 0.999738, 0, 380.0, 0, 1.1, 0.9 ],
[139, 1, 87.169265, 17.433853, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[140, 1, 60.281279, 12.056256, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[141, 1, 71.422675, 14.284535, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[142, 1, 78.590439, 15.718088, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[143, 1, 0, 0, 0, 0, 0, 0.999983, 0, 380.0, 0, 1.1, 0.9 ],
[144, 1, 71.587765, 14.317553, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[145, 1, 208.250701, 41.65014, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[146, 1, 268.474329, 53.694866, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[147, 1, 164.558954, 32.911791, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[148, 1, 232.222895, 46.444579, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[149, 1, 149.71242, 29.942484, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[150, 1, 195.465109, 39.093022, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[151, 1, 46.061054, 9.212211, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[152, 1, 95.617972, 19.123594, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[153, 1, 170.598013, 34.119603, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[154, 1, 175.238014, 35.047603, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[155, 1, 182.525879, 36.505176, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[156, 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9 ],
[157, 1, 0, 0, 0, 0, 0, 1.001115, 0, 380.0, 0, 1.1, 0.9 ],
[158, 1, 48.08949, 9.617898, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[159, 1, 0, 0, 0, 0, 0, 1.001058, 0, 380.0, 0, 1.1, 0.9 ],
[160, 1, 0, 0, 0, 0, 0, 1.000009, 0, 380.0, 0, 1.1, 0.9 ],
[161, 1, 149.290329, 29.858066, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[162, 1, 223.144798, 44.62896, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[163, 1, 44.626852, 8.92537, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[164, 1, 44.806324, 8.961265, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[165, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9 ],
[166, 1, 52.385841, 10.477168, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[167, 1, 73.693692, 14.738738, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[168, 1, 50.295004, 10.059001, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[169, 1, 172.174302, 34.43486, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[170, 1, 129.374469, 25.874894, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[171, 1, 110.421061, 22.084212, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[172, 1, 54.191648, 10.83833, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[173, 1, 51.769064, 10.353813, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[174, 1, 77.686815, 15.537363, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[175, 1, 51.735134, 10.347027, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[176, 1, 180.277733, 36.055547, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[177, 1, 29.396911, 5.879382, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[178, 1, 155.693252, 31.13865, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[179, 1, 57.36759, 11.473518, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[180, 1, 50.427579, 10.085516, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[181, 1, 38.061285, 7.612257, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[182, 1, 1.724194, 0.344839, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[183, 1, 516.10549, 103.221098, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[184, 1, 0, 0, 0, 0, 0, 0.999161, 0, 380.0, 0, 1.1, 0.9 ],
[185, 1, 110.366174, 22.073235, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[186, 1, 59.431612, 11.886322, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[187, 1, 34.761441, 6.952288, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[188, 1, 51.735134, 10.347027, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[189, 1, 189.835515, 37.967103, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[190, 1, 251.092986, 50.218597, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[191, 1, 0, 0, 0, 0, 0, 1.000011, 0, 380.0, 0, 1.1, 0.9 ],
[192, 1, 60.470797, 12.094159, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[193, 1, 51.651681, 10.330336, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[194, 1, 35.655983, 7.131197, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[195, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[196, 1, 50.023265, 10.004653, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[197, 1, 79.255224, 15.851045, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[198, 1, 46.898997, 9.379799, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[199, 1, 60.380898, 12.07618, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[200, 1, 51.736334, 10.347267, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[201, 1, 0, 0, 0, 0, 0, 0.998603, 0, 380.0, 0, 1.1, 0.9 ],
[202, 1, 53.015057, 10.603011, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[203, 1, 6.985209, 1.397042, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[204, 1, 204.735079, 40.947016, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[205, 1, 102.376756, 20.475351, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[206, 1, 49.133688, 9.826738, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[207, 1, 146.102427, 29.220485, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[208, 1, 43.021403, 8.604281, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[209, 1, 59.784717, 11.956943, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[210, 1, 68.68145, 13.73629, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[211, 1, 241.36201, 48.272402, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[212, 1, 60.493983, 12.098797, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[213, 1, 283.582271, 56.716454, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[214, 1, 190.814923, 38.162985, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[215, 1, 403.487677, 80.697535, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[216, 1, 136.050691, 27.210138, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[217, 1, 43.595473, 8.719095, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[218, 1, 132.815127, 26.563025, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[219, 1, 213.450096, 42.690019, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[220, 1, 0, 0, 0, 0, 0, 0.999455, 0, 380.0, 0, 1.1, 0.9 ],
[221, 1, 121.763271, 24.352654, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[222, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[223, 1, 120.674939, 24.134988, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[224, 1, 140.32833, 28.065666, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[225, 1, 251.967566, 50.393513, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[226, 1, 88.020057, 17.604011, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[227, 1, 109.655139, 21.931028, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[228, 1, 107.513515, 21.502703, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[229, 1, 237.909071, 47.581814, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[230, 1, 57.064182, 11.412836, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[231, 1, 0, 0, 0, 0, 0, 1.000723, 0, 380.0, 0, 1.1, 0.9 ],
[232, 1, 0, 0, 0, 0, 0, 0.999961, 0, 380.0, 0, 1.1, 0.9 ],
[233, 1, 0, 0, 0, 0, 0, 0.99979, 0, 380.0, 0, 1.1, 0.9 ],
[234, 1, 203.268961, 40.653792, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[235, 1, 66.100357, 13.220071, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[236, 1, 0, 0, 0, 0, 0, 0.999982, 0, 380.0, 0, 1.1, 0.9 ],
[237, 1, 0.547056, 0.109411, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[238, 1, 74.793756, 14.958751, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[239, 1, 103.336953, 20.667391, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[240, 1, 651.829683, 130.365937, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[241, 1, 482.331322, 96.466264, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[242, 1, 175.625563, 35.125113, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[243, 1, 141.694807, 28.338961, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[244, 1, 168.818838, 33.763768, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[245, 1, 0, 0, 0, 0, 0, 1.001057, 0, 380.0, 0, 1.1, 0.9 ],
[246, 1, 0, 0, 0, 0, 0, 1.000289, 0, 380.0, 0, 1.1, 0.9 ],
[247, 1, 33.501144, 6.700229, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[248, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ],
[249, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0, 0, 1.1, 0.9 ],
[250, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9 ],
[251, 1, 83.142241, 16.628448, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[252, 1, 213.221814, 42.644363, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[253, 1, 93.612513, 18.722503, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[254, 1, 29.888922, 5.977784, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[255, 1, 146.991227, 29.398245, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[256, 1, 168.57336, 33.714672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[257, 1, 81.357232, 16.271446, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[258, 1, 265.133395, 53.026679, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[259, 1, 0, 0, 0, 0, 0, 0.999297, 0, 380.0, 0, 1.1, 0.9 ],
[260, 1, 165.00861, 33.001722, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[261, 1, 0, 0, 0, 0, 0, 1.001077, 0, 380.0, 0, 1.1, 0.9 ],
[262, 1, 0, 0, 0, 0, 0, 1.001213, 0, 380.0, 0, 1.1, 0.9 ],
[263, 1, 236.704636, 47.340927, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[264, 1, 306.426919, 61.285384, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[265, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ],
[266, 1, 147.677362, 29.535472, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[267, 1, 186.779823, 37.355965, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[268, 1, 64.951259, 12.990252, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[269, 1, 52.158296, 10.431659, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[270, 1, 0, 0, 0, 0, 0, 1.000028, 0, 380.0, 0, 1.1, 0.9 ],
[271, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[272, 1, 1.064221, 0.212844, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[273, 1, 145.532771, 29.106554, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[274, 1, 282.896536, 56.579307, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[275, 1, 52.959776, 10.591955, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[276, 1, 206.450669, 41.290134, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[277, 1, 0, 0, 0, 0, 0, 0.998999, 0, 380.0, 0, 1.1, 0.9 ],
[278, 1, 161.168499, 32.2337, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[279, 1, 0, 0, 0, 0, 0, 0.999351, 0, 380.0, 0, 1.1, 0.9 ],
[280, 1, 0, 0, 0, 0, 0, 0.999232, 0, 380.0, 0, 1.1, 0.9 ],
[281, 1, 212.884285, 42.576857, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[282, 1, 301.051348, 60.21027, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[283, 1, 120.674452, 24.13489, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[284, 1, 183.068733, 36.613747, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[285, 1, 81.642233, 16.328447, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[286, 1, 171.108932, 34.221786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[287, 1, 105.167309, 21.033462, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[288, 1, 67.642881, 13.528576, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[289, 1, 106.382633, 21.276527, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[290, 1, 0, 0, 0, 0, 0, 1.004328, 0, 380.0, 0, 1.1, 0.9 ],
[291, 1, 70.009154, 14.001831, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[292, 1, 138.019839, 27.603968, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[293, 1, 121.642104, 24.328421, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[294, 1, 32.415782, 6.483156, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[295, 1, 67.825108, 13.565022, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[296, 1, 192.55564, 38.511128, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[297, 1, 202.378138, 40.475628, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[298, 1, 106.85968, 21.371936, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[299, 1, 103.492889, 20.698578, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[300, 1, 281.942654, 56.388531, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[301, 1, 0, 0, 0, 0, 0, 0.999155, 0, 380.0, 0, 1.1, 0.9 ],
[302, 1, 237.502196, 47.500439, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[303, 1, 121.988016, 24.397603, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[304, 1, 104.751195, 20.950239, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[305, 1, 0, 0, 0, 0, 0, 0.999588, 0, 380.0, 0, 1.1, 0.9 ],
[306, 1, 0, 0, 0, 0, 0, 1.001506, 0, 380.0, 0, 1.1, 0.9 ],
[307, 1, 124.244651, 24.84893, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[308, 1, 153.177102, 30.63542, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[309, 1, 250.619279, 50.123856, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[310, 1, 0, 0, 0, 0, 0, 0.999979, 0, 380.0, 0, 1.1, 0.9 ],
[311, 1, 212.878265, 42.575653, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[312, 1, 95.737279, 19.147456, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[313, 1, 0, 0, 0, 0, 0, 0.999814, 0, 380.0, 0, 1.1, 0.9 ],
[314, 1, 296.533151, 59.30663, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[315, 1, 0, 0, 0, 0, 0, 1.00139, 0, 380.0, 0, 1.1, 0.9 ],
[316, 1, 116.185543, 23.237109, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[317, 1, 156.439578, 31.287916, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[318, 1, 257.087979, 51.417596, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[319, 1, 9.209919, 1.841984, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[320, 1, 0, 0, 0, 0, 0, 0.999999, 0, 380.0, 0, 1.1, 0.9 ],
[321, 1, 217.864189, 43.572838, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[322, 1, 27.735514, 5.547103, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[323, 1, 2.885644, 0.577129, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[324, 1, 510.112065, 102.022413, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[325, 1, 166.171204, 33.234241, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[326, 1, 13.472646, 2.694529, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[327, 1, 115.941313, 23.188263, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[328, 1, 197.581816, 39.516363, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[329, 1, 297.180666, 59.436133, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[330, 1, 0, 0, 0, 0, 0, 1.001493, 0, 380.0, 0, 1.1, 0.9 ],
[331, 1, 23.595134, 4.719027, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[332, 1, 0, 0, 0, 0, 0, 0.997873, 0, 380.0, 0, 1.1, 0.9 ],
[333, 1, 247.920323, 49.584065, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[334, 1, 0, 0, 0, 0, 0, 0.999337, 0, 380.0, 0, 1.1, 0.9 ],
[335, 1, 253.021394, 50.604279, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[336, 1, 0, 0, 0, 0, 0, 0.997899, 0, 380.0, 0, 1.1, 0.9 ],
[337, 1, 100.644491, 20.128898, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[338, 1, 273.163452, 54.63269, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[339, 1, 168.947817, 33.789563, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[340, 1, 142.841965, 28.568393, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[341, 1, 129.13199, 25.826398, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[342, 1, 224.001511, 44.800302, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[343, 1, 122.890495, 24.578099, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[344, 1, 308.115905, 61.623181, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[345, 1, 336.91261, 67.382522, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[346, 1, 334.468328, 66.893666, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[347, 1, 116.96938, 23.393876, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[348, 1, 305.765662, 61.153132, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[349, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[350, 1, 160.409129, 32.081826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[351, 1, 0, 0, 0, 0, 0, 0.999777, 0, 380.0, 0, 1.1, 0.9 ],
[352, 1, 1061.795233, 212.359047, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[353, 1, 3.192111, 0.638422, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[354, 1, 21.686927, 4.337385, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[355, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[357, 1, 0.054362, 0.010872, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[358, 1, 0, 0, 0, 0, 0, 1.00115, 0, 380.0, 0, 1.1, 0.9 ],
[359, 1, 3.174021, 0.634804, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[360, 1, 0, 0, 0, 0, 0, 1.000702, 0, 380.0, 0, 1.1, 0.9 ],
[361, 1, 81.236209, 16.247242, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[362, 1, 231.565239, 46.313048, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[363, 1, 340.939078, 68.187816, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[364, 1, 80.439881, 16.087976, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[365, 1, 72.199065, 14.439813, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[366, 1, 143.098317, 28.619663, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[367, 1, 69.167782, 13.833556, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[368, 1, 34.059353, 6.811871, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[369, 1, 27.987713, 5.597543, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[370, 1, 82.396627, 16.479325, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[371, 1, 414.583549, 82.91671, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[372, 1, 240.422957, 48.084591, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[373, 1, 162.237585, 32.447517, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[374, 1, 83.192687, 16.638537, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[375, 1, 272.900898, 54.58018, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[376, 1, 299.320888, 59.864178, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[377, 1, 214.189404, 42.837881, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[378, 1, 213.777134, 42.755427, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[379, 1, 73.679821, 14.735964, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[380, 1, 0, 0, 0, 0, 0, 1.001413, 0, 380.0, 0, 1.1, 0.9 ],
[381, 1, 246.389815, 49.277963, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[382, 1, 0, 0, 0, 0, 0, 0.999769, 0, 380.0, 0, 1.1, 0.9 ],
[383, 1, 0, 0, 0, 0, 0, 0.999055, 0, 380.0, 0, 1.1, 0.9 ],
[384, 1, 86.944845, 17.388969, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[385, 1, 109.741458, 21.948292, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[386, 1, 88.173972, 17.634794, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[387, 1, 179.569895, 35.913979, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[388, 1, 964.28771, 192.857542, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[389, 1, 0, 0, 0, 0, 0, 0.999912, 0, 380.0, 0, 1.1, 0.9 ],
[390, 1, 79.618981, 15.923796, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[391, 1, 90.692809, 18.138562, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[392, 1, 174.038589, 34.807718, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[393, 1, 217.341636, 43.468327, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[394, 1, 78.171538, 15.634308, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[395, 1, 108.340922, 21.668184, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[396, 1, 76.736766, 15.347353, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[397, 1, 615.344336, 123.068867, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[398, 1, 266.518924, 53.303785, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[399, 1, 113.556473, 22.711295, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[400, 1, 60.500986, 12.100197, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[401, 1, 0, 0, 0, 0, 0, 1.000677, 0, 380.0, 0, 1.1, 0.9 ],
[402, 1, 0, 0, 0, 0, 0, 1.000436, 0, 380.0, 0, 1.1, 0.9 ],
[403, 1, 30.040147, 6.008029, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[404, 1, 105.833295, 21.166659, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[405, 1, 797.878416, 159.575683, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[406, 1, 60.453087, 12.090617, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[407, 1, 119.66821, 23.933642, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[408, 1, 346.013355, 69.202671, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[409, 1, 0, 0, 0, 0, 0, 0.999958, 0, 380.0, 0, 1.1, 0.9 ],
[410, 1, 44.798315, 8.959663, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[411, 1, 42.358642, 8.471728, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[412, 1, 2.975231, 0.595046, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[413, 1, 148.528896, 29.705779, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[414, 1, 12.61171, 2.522342, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[415, 1, 0, 0, 0, 0, 0, 1.000312, 0, 380.0, 0, 1.1, 0.9 ],
[416, 1, 179.604023, 35.920805, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[417, 1, 7.027563, 1.405513, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[418, 1, 146.450173, 29.290035, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[419, 1, 78.276577, 15.655315, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[420, 1, 78.808605, 15.761721, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[421, 1, 113.521787, 22.704357, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[422, 1, 83.169866, 16.633973, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[423, 1, 174.675097, 34.935019, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[424, 1, 12.593624, 2.518725, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[425, 1, 103.425433, 20.685087, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[426, 1, 8.569115, 1.713823, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[427, 1, 72.014986, 14.402997, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[428, 1, 32.289285, 6.457857, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[429, 1, 364.376918, 72.875384, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[430, 1, 194.091051, 38.81821, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[431, 1, 129.791667, 25.958333, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[432, 1, 151.718496, 30.343699, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[433, 1, 77.554451, 15.51089, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[434, 1, 40.363113, 8.072623, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[435, 1, 161.427044, 32.285409, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[436, 1, 86.18319, 17.236638, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[437, 1, 19.627317, 3.925463, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[438, 1, 52.674345, 10.534869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[439, 1, 98.072822, 19.614564, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[440, 1, 82.881556, 16.576311, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[441, 1, 63.539817, 12.707963, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[442, 1, 84.084687, 16.816937, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[443, 1, 182.303517, 36.460703, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[444, 1, 0, 0, 0, 0, 0, 0.999997, 0, 380.0, 0, 1.1, 0.9 ],
[445, 1, 82.836611, 16.567322, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[446, 1, 38.410594, 7.682119, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[447, 1, 73.026044, 14.605209, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[448, 1, 53.666726, 10.733345, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[449, 1, 270.605805, 54.121161, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[450, 1, 165.59784, 33.119568, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[451, 1, 70.760773, 14.152155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[452, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ],
[453, 1, 47.423447, 9.484689, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[454, 1, 33.085725, 6.617145, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[455, 1, 53.94349, 10.788698, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[456, 1, 53.94349, 10.788698, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[457, 1, 165.431157, 33.086231, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[458, 1, 157.345889, 31.469178, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[459, 1, 191.495801, 38.29916, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[460, 1, 251.664937, 50.332987, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[461, 1, 261.786107, 52.357221, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[462, 1, 80.081727, 16.016345, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[463, 1, 41.034378, 8.206876, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[464, 1, 41.083979, 8.216796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[465, 1, 66.361856, 13.272371, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[466, 1, 53.877431, 10.775486, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[467, 1, 49.719948, 9.94399, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[468, 1, 81.521062, 16.304212, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[469, 1, 50.516969, 10.103394, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[470, 1, 128.647331, 25.729466, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[471, 1, 126.664898, 25.33298, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[472, 1, 44.303564, 8.860713, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[473, 1, 81.351906, 16.270381, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[474, 1, 42.017409, 8.403482, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[475, 1, 41.233718, 8.246744, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[476, 1, 46.600885, 9.320177, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[477, 1, 75.203749, 15.04075, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[478, 1, 94.469615, 18.893923, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[479, 1, 171.199924, 34.239985, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[480, 1, 75.040029, 15.008006, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[481, 1, 65.168234, 13.033647, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[482, 1, 73.995726, 14.799145, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[483, 1, 62.927942, 12.585588, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[484, 1, 49.332446, 9.866489, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[485, 1, 73.689618, 14.737924, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[486, 1, 677.908485, 135.581697, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1, 0.9 ],
[487, 1, 171.778877, 34.355775, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[488, 1, 494.972714, 98.994543, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[489, 1, 130.27541, 26.055082, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[490, 1, 40.536849, 8.10737, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[491, 1, 55.738688, 11.147738, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[492, 1, 86.919491, 17.383898, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[493, 1, 112.02882, 22.405764, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[494, 1, 153.112663, 30.622533, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[495, 1, 120.527031, 24.105406, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[496, 1, 8.537131, 1.707426, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[497, 1, 1067.56553, 213.513106, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[498, 1, 50.067861, 10.013572, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[499, 1, 69.886531, 13.977306, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[500, 1, 38.262053, 7.652411, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[501, 1, 64.732797, 12.946559, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[502, 1, 255.486945, 51.097389, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[503, 1, 78.245685, 15.649137, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[504, 1, 51.23895, 10.24779, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[505, 1, 363.426388, 72.685278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[506, 1, 114.075071, 22.815014, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[507, 1, 108.509533, 21.701907, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[508, 1, 157.749113, 31.549823, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[509, 1, 207.882042, 41.576408, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[510, 1, 131.331505, 26.266301, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[511, 1, 114.561196, 22.912239, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[512, 1, 75.674743, 15.134949, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[513, 1, 41.688708, 8.337742, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[514, 1, 103.75916, 20.751832, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[515, 1, 92.559335, 18.511867, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[516, 1, 103.552116, 20.710423, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[517, 1, 48.640908, 9.728182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[518, 1, 273.948672, 54.789734, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[519, 1, 26.961565, 5.392313, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[520, 1, 108.854273, 21.770855, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[521, 1, 98.332374, 19.666475, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[522, 1, 84.192975, 16.838595, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[523, 1, 45.320121, 9.064024, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[524, 1, 131.541254, 26.308251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[525, 1, 156.710188, 31.342038, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[526, 1, 47.511581, 9.502316, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[527, 1, 52.164378, 10.432876, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[528, 1, 113.853632, 22.770726, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[529, 1, 145.943497, 29.188699, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[530, 1, 61.844893, 12.368979, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[531, 1, 62.879916, 12.575983, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[532, 1, 60.353758, 12.070752, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[533, 1, 54.08425, 10.81685, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[534, 1, 149.194629, 29.838926, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[535, 1, 186.7821, 37.35642, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[536, 1, 147.224547, 29.444909, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[537, 1, 48.97554, 9.795108, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[538, 1, 36.610772, 7.322154, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[539, 1, 38.84628, 7.769256, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[540, 1, 34.979369, 6.995874, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[541, 1, 90.354729, 18.070946, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[542, 1, 124.119469, 24.823894, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[543, 1, 67.793444, 13.558689, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[544, 1, 126.266241, 25.253248, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[545, 1, 271.871924, 54.374385, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[546, 1, 136.266314, 27.253263, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[547, 1, 176.133164, 35.226633, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[548, 1, 57.015034, 11.403007, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[549, 1, 48.752729, 9.750546, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[550, 1, 40.229292, 8.045858, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[551, 1, 38.780067, 7.756013, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[552, 1, 192.577447, 38.515489, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[553, 1, 1.332338, 0.266468, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[554, 1, 195.101058, 39.020212, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[555, 1, 74.335663, 14.867133, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[556, 1, 114.999744, 22.999949, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[557, 1, 244.333084, 48.866617, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[558, 1, 144.073127, 28.814625, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[559, 1, 77.10655, 15.42131, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[560, 1, 120.458673, 24.091735, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[561, 1, 66.05602, 13.211204, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[562, 1, 180.460098, 36.09202, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[563, 1, 126.878156, 25.375631, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[564, 1, 250.521271, 50.104254, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[565, 1, 189.030713, 37.806143, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[566, 1, 0.303624, 0.060725, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[567, 1, 307.277901, 61.45558, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[568, 1, 284.157713, 56.831543, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[569, 1, 199.935363, 39.987073, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[570, 1, 312.135104, 62.427021, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[571, 1, 229.817616, 45.963523, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[572, 1, 405.359904, 81.071981, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[573, 1, 117.994953, 23.598991, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[574, 1, 224.825446, 44.965089, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[575, 1, 4.224873, 0.844975, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[576, 1, 273.386233, 54.677247, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[577, 1, 301.379822, 60.275964, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[578, 1, 287.747363, 57.549473, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[579, 1, 104.978091, 20.995618, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[580, 1, 21.854877, 4.370975, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[581, 1, 0.12558, 0.025116, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[582, 1, 79.071055, 15.814211, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[583, 1, 90.691594, 18.138319, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[584, 1, 52.034493, 10.406899, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[585, 1, 90.338282, 18.067656, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ]
])
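# Column-layout note (an assumption, not stated in this file): the 13-column bus rows
# above and the 21-column generator rows below appear to follow the standard
# PYPOWER/MATPOWER casefile conventions:
#   bus: [bus_i, type, Pd, Qd, Gs, Bs, area, Vm, Va, baseKV, zone, Vmax, Vmin]
#   gen: [bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin,
#         Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf]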
ppc["gen"] = array([
[586, 47.326635, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[590, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[593, 11.1, 0, 9999, -9999, 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[594, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[595, 876.332761, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[597, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[598, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[599, 9.3, 0, 9999, -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[602, 24.6, 0, 9999, -9999, 1.0, 100, 1, 24.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[603, 486.918756, 0, 9999, -9999, 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[607, 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[609, 36.4, 0, 9999, -9999, 1.0, 100, 1, 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[610, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[612, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[616, 29.0, 0, 9999, -9999, 1.0, 100, 1, 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[617, 137.0, 0, 9999, -9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[618, 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[623, 760.0, 0, 9999, -9999, 1.0, 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[624, 27.0, 0, 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[631, 79.8, 0, 9999, -9999, 1.0, 100, 1, 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[632, 45.1, 0, 9999, -9999, 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[637, 53.7, 0, 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[640, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[641, 12.6, 0, 9999, -9999, 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[642, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[647, 14.0, 0, 9999, -9999, 1.0, 100, 1, 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[650, 1324.5, 0, 9999, -9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[652, 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[658, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[661, 32.7, 0, 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[666, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[668, 766.0, 0, 9999, -9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[670, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[672, 33.1, 0, 9999, -9999, 1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[676, 370.0, 0, 9999, -9999, 1.0, 100, 1, 370.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[677, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[678, 1017.0, 0, 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[679, 545.306254, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[683, 27.5, 0, 9999, -9999, 1.0, 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[687, 1329.0, 0, 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[693, 194.0, 0, 9999, -9999, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[694, 16.4, 0, 9999, -9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[695, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[698, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[699, 104.6, 0, 9999, -9999, 1.0, 100, 1, 104.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[700, 27.0, 0, 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[701, 47.2, 0, 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[702, 73.4, 0, 9999, -9999, 1.0, 100, 1, 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[704, 508.0, 0, 9999, -9999, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[705, 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[707, 34.0, 0, 9999, -9999, 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[708, 7.8, 0, 9999, -9999, 1.0, 100, 1, 7.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[711, 109.981679, 0, 9999, -9999, 1.0, 100, 1, 176.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[713, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[714, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[716, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[717, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[719, 1336.155648, 0, 9999, -9999, 1.0, 100, 1, 1958.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[721, 4.0, 0, 9999, -9999, 1.0, 100, 1, 4.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[722, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[723, 19.7, 0, 9999, -9999, 1.0, 100, 1, 19.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[724, 12.1, 0, 9999, -9999, 1.0, 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[725, 800.0, 0, 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[726, 126.0, 0, 9999, -9999, 1.0, 100, 1, 126.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[728, 510.0, 0, 9999, -9999, 1.0, 100, 1, 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[730, 633.2, 0, 9999, -9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[731, 715.079188, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[733, 396.6, 0, 9999, -9999, 1.0, 100, 1, 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[735, 84.8, 0, 9999, -9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[736, 32.0, 0, 9999, -9999, 1.0, 100, 1, 32.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[737, 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[739, 59.9, 0, 9999, -9999, 1.0, 100, 1, 59.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[741, 214.0, 0, 9999, -9999, 1.0, 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[742, 9.0, 0, 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[743, 190.321651, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[745, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[746, 100.0, 0, 9999, -9999, 1.0, 100, 1, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[747, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[748, 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[750, 90.8, 0, 9999, -9999, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[758, 18.5, 0, 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[760, 317.678066, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[762, 1105.0, 0, 9999, -9999, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[763, 20.3, 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[769, 43.3, 0, 9999, -9999, 1.0, 100, 1, 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[771, 690.0, 0, 9999, -9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[772, 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[775, 128.0, 0, 9999, -9999, 1.0, 100, 1, 128.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[776, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[777, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[778, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[779, 34.2, 0, 9999, -9999, 1.0, 100, 1, 34.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[781, 977.621115, 0, 9999, -9999, 1.0, 100, 1, 1310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[784, 780.2167, 0, 9999, -9999, 1.0, 100, 1, 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[785, 3.0, 0, 9999, -9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[786, 195.4, 0, 9999, -9999, 1.0, 100, 1, 195.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[787, 778.0, 0, 9999, -9999, 1.0, 100, 1, 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[788, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[789, 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[791, 10.0, 0, 9999, -9999, 1.0, 100, 1, 10.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[792, 62.7, 0, 9999, -9999, 1.0, 100, 1, 62.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[793, 9.8, 0, 9999, -9999, 1.0, 100, 1, 9.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[794, 0.2, 0, 9999, -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[795, 13.6, 0, 9999, -9999, 1.0, 100, 1, 13.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[796, 85.1, 0, 9999, -9999, 1.0, 100, 1, 85.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[798, 209.378239, 0, 9999, -9999, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[800, 36.5, 0, 9999, -9999, 1.0, 100, 1, 36.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[801, 18.60032, 0, 9999, -9999, 1.0, 100, 1, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[802, 500.0, 0, 9999, -9999, 1.0, 100, 1, 500.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[805, 875.108011, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[806, 35.8, 0, 9999, -9999, 1.0, 100, 1, 35.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[808, 217.5, 0, 9999, -9999, 1.0, 100, 1, 217.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[809, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[811, 25.2, 0, 9999, -9999, 1.0, 100, 1, 25.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[814, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[816, 80.1, 0, 9999, -9999, 1.0, 100, 1, 80.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[817, 54.0, 0, 9999, -9999, 1.0, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[818, 216.869854, 0, 9999, -9999, 1.0, 100, 1, 757.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[821, 82.5, 0, 9999, -9999, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[822, 134.0, 0, 9999, -9999, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[825, 42.7, 0, 9999, -9999, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[826, 58.0, 0, 9999, -9999, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[830, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[833, 18.6, 0, 9999, -9999, 1.0, 100, 1, 18.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[834, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[835, 63.7, 0, 9999, -9999, 1.0, 100, 1, 63.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[836, 25.5, 0, 9999, -9999, 1.0, 100, 1, 25.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[837, 472.0, 0, 9999, -9999, 1.0, 100, 1, 472.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[839, 73.3, 0, 9999, -9999, 1.0, 100, 1, 73.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[840, 90.722956, 0, 9999, -9999, 1.0, 100, 1, 1391.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[841, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[843, 333.0, 0, 9999, -9999, 1.0, 100, 1, 333.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[844, 40.0, 0, 9999, -9999, 1.0, 100, 1, 40.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[845, 318.0, 0, 9999, -9999, 1.0, 100, 1, 318.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[848, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[849, 779.0, 0, 9999, -9999, 1.0, 100, 1, 779.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[850, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[851, 79.5, 0, 9999, -9999, 1.0, 100, 1, 79.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[852, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[853, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[855, 688.0, 0, 9999, -9999, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[856, 36.0, 0, 9999, -9999, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[857, 1402.0, 0, 9999, -9999, 1.0, 100, 1, 1402.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[858, 56.8, 0, 9999, -9999, 1.0, 100, 1, 56.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[859, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[860, 25.0, 0, 9999, -9999, 1.0, 100, 1, 25.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[862, 725.0, 0, 9999, -9999, 1.0, 100, 1, 725.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[863, 0.6, 0, 9999, -9999, 1.0, 100, 1, 0.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[864, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[865, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[866, 260.44, 0, 9999, -9999, 1.0, 100, 1, 260.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[867, 769.0, 0, 9999, -9999, 1.0, 100, 1, 769.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[869, 1360.0, 0, 9999, -9999, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[870, 58.4, 0, 9999, -9999, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[872, 22.5, 0, 9999, -9999, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[873, 122.0, 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[874, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[875, 24.4, 0, 9999, -9999, 1.0, 100, 1, 24.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[876, 58.4, 0, 9999, -9999, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[877, 24.8, 0, 9999, -9999, 1.0, 100, 1, 24.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[881, 1001.3, 0, 9999, -9999, 1.0, 100, 1, 1001.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[882, 17.4, 0, 9999, -9999, 1.0, 100, 1, 17.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[883, 18.0, 0, 9999, -9999, 1.0, 100, 1, 18.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[885, 490.0, 0, 9999, -9999, 1.0, 100, 1, 490.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[886, 2572.0, 0, 9999, -9999, 1.0, 100, 1, 2572.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[888, 35.1, 0, 9999, -9999, 1.0, 100, 1, 35.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[889, 9.5, 0, 9999, -9999, 1.0, 100, 1, 9.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[890, 48.0, 0, 9999, -9999, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[895, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[896, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[897, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[898, 84.6, 0, 9999, -9999, 1.0, 100, 1, 84.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[899, 8.5, 0, 9999, -9999, 1.0, 100, 1, 8.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[900, 112.6, 0, 9999, -9999, 1.0, 100, 1, 112.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[902, 19.5, 0, 9999, -9999, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[903, 20.1, 0, 9999, -9999, 1.0, 100, 1, 20.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[905, 137.3, 0, 9999, -9999, 1.0, 100, 1, 137.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[906, 66.0, 0, 9999, -9999, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[907, 67.3, 0, 9999, -9999, 1.0, 100, 1, 67.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[909, 36.8, 0, 9999, -9999, 1.0, 100, 1, 36.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[913, 74.0, 0, 9999, -9999, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[915, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[917, 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[918, 38.5, 0, 9999, -9999, 1.0, 100, 1, 38.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[920, 12.8, 0, 9999, -9999, 1.0, 100, 1, 12.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[921, 124.0, 0, 9999, -9999, 1.0, 100, 1, 124.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[922, 164.0, 0, 9999, -9999, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[923, 146.0, 0, 9999, -9999, 1.0, 100, 1, 146.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[924, 11.7, 0, 9999, -9999, 1.0, 100, 1, 11.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[925, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[928, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[931, 217.1, 0, 9999, -9999, 1.0, 100, 1, 217.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[934, 181.07236, 0, 9999, -9999, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[935, 23.1, 0, 9999, -9999, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[936, 104.4, 0, 9999, -9999, 1.0, 100, 1, 104.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[937, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[939, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[940, 29.6, 0, 9999, -9999, 1.0, 100, 1, 29.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[942, 51.9, 0, 9999, -9999, 1.0, 100, 1, 51.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[944, 25.4, 0, 9999, -9999, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[945, 35.0, 0, 9999, -9999, 1.0, 100, 1, 35.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[948, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[950, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[952, 31.7, 0, 9999, -9999, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[956, 43.862599, 0, 9999, -9999, 1.0, 100, 1, 65.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[957, 6.0, 0, 9999, -9999, 1.0, 100, 1, 6.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[958, 66.7, 0, 9999, -9999, 1.0, 100, 1, 66.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[959, 45.5, 0, 9999, -9999, 1.0, 100, 1, 45.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[960, 26.5, 0, 9999, -9999, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[963, 780.274633, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[965, 352.0, 0, 9999, -9999, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[966, 66.0, 0, 9999, -9999, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[967, 37.5, 0, 9999, -9999, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[968, 54.0, 0, 9999, -9999, 0.999529, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[969, 56.9, 0, 9999, -9999, 0.999529, 100, 1, 56.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[971, 20.0, 0, 9999, -9999, 1.0, 100, 1, 20.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[972, 390.0, 0, 9999, -9999, 1.0, 100, 1, 390.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[973, 1347.0, 0, 9999, -9999, 1.0, 100, 1, 1347.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[975, 52.5, 0, 9999, -9999, 1.0, 100, 1, 52.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[976, 26.9, 0, 9999, -9999, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[977, 324.0, 0, 9999, -9999, 1.0, 100, 1, 324.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[978, 4.6, 0, 9999, -9999, 1.0, 100, 1, 4.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[981, 119.0, 0, 9999, -9999, 1.0, 100, 1, 119.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[982, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[983, 44.0, 0, 9999, -9999, 1.0, 100, 1, 44.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[984, 465.0, 0, 9999, -9999, 1.0, 100, 1, 465.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[985, 22.0, 0, 9999, -9999, 1.0, 100, 1, 22.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[986, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[987, 164.5, 0, 9999, -9999, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[988, 5.1, 0, 9999, -9999, 1.0, 100, 1, 5.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[990, 176.488981, 0, 9999, -9999, 1.0, 100, 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[993, 392.0, 0, 9999, -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[994, 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[997, 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[998, 423.0, 0, 9999, -9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[999, 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1003, 900.0, 0, 9999, -9999, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1007, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1010, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1011, 18.7, 0, 9999, -9999, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1012, 2822.8416, 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1018, 175.9, 0, 9999, -9999, 1.0, 100, 1, 175.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1019, 120.0, 0, 9999, -9999, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1023, 0.2, 0, 9999, -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1025, 113.6, 0, 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, 655.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1027, 42.143139, 0, 9999, -9999, 1.0, 100, 1, 48.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1028, 400.0, 0, 9999, -9999, 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1029, 60.0, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1030, 547.524827, 0, 9999, -9999, 1.0, 100, 1, 1018.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1031, 1447.199962, 0, 9999, -9999, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1032, 37.130761, 0, 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1033, 7.986228, 0, 9999, -9999, 1.0, 100, 1, 50.164506, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1034, 26.393282, 0, 9999, -9999, 1.0, 100, 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1035, 10.23952, 0, 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1036, 12.383139, 0, 9999, -9999, 1.0, 100, 1, 67.223077, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1037, 37.034771, 0, 9999, -9999, 1.0, 100, 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1038, 32.273941, 0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1039, 16.259191, 0, 9999, -9999, 1.0, 100, 1, 132.724114, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1041, 23.220031, 0, 9999, -9999, 1.0, 100, 1, 204.187624, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1042, 5.897804, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1044, 17.843471, 0, 9999, -9999, 1.0, 100, 1, 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1046, 79.936922, 0, 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1047, 7.020261, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1048, 40.016896, 0, 9999, -9999, 1.0, 100, 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1049, 75.832425, 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1050, 2.574285, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1051, 7.252584, 0, 9999, -9999, 1.0, 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1052, 15.175791, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1053, 12.346923, 0, 9999, -9999, 1.0, 100, 1, 16.368087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1054, 216.322419, 0, 9999, -9999, 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1055, 0.30681, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1056, 53.206333, 0, 9999, -9999, 1.0, 100, 1, 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1057, 100.378169, 0, 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1058, 117.210047, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1059, 43.35606, 0, 9999, -9999, 1.0, 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1060, 0.48885, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1061, 9.89646, 0, 9999, -9999, 1.0, 100, 1, 161.862597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1062, 0.28386, 0, 9999, -9999, 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1063, 0.835661, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1064, 19.595506, 0, 9999, -9999, 1.0, 100, 1, 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1065, 38.415656, 0, 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1066, 11.779239, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1067, 0.006954, 0, 9999, -9999, 1.0, 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1072, 53.011687, 0, 9999, -9999, 1.0, 100, 1, 112.606433, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1073, 54.828335, 0, 9999, -9999, 1.0, 100, 1, 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1074, 80.822515, 0, 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1077, 0.830976, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1079, 37.497464, 0, 9999, -9999, 1.0, 100, 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1080, 0.003424, 0, 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1081, 39.02582, 0, 9999, -9999, 1.0, 100, 1, 405.642115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1082, 30.101628, 0, 9999, -9999, 1.0, 100, 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1083, 40.966551, 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1084, 34.330508, 0, 9999, -9999, 1.0, 100, 1, 602.719371, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1085, 17.091912, 0, 9999, -9999, 1.0, 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1086, 32.887191, 0, 9999, -9999, 1.0, 100, 1, 225.59917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1087, 7.499037, 0, 9999, -9999, 1.0, 100, 1, 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1088, 3.179915, 0, 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1089, 21.693926, 0, 9999, -9999, 1.0, 100, 1, 384.449592, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1090, 68.505794, 0, 9999, -9999, 1.0, 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1091, 30.312336, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1092, 44.920748, 0, 9999, -9999, 1.0, 100, 1, 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1093, 65.837792, 0, 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1094, 0.941636, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1095, 0.050289, 0, 9999, -9999, 1.0, 100, 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1096, 29.200726, 0, 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1097, 2.055263, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1098, 34.647847, 0, 9999, -9999, 1.0, 100, 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1099, 166.043557, 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1101, 35.196912, 0, 9999, -9999, 1.0, 100, 1, 83.930665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1102, 85.484282, 0, 9999, -9999, 1.0, 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1103, 88.696046, 0, 9999, -9999, 1.0, 100, 1, 245.381701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1104, 0.046739, 0, 9999, -9999, 1.0, 100, 1, 0.206918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1105, 0.763545, 0, 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1106, 0.538044, 0, 9999, -9999, 1.0, 100, 1, 2.289793, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1107, 9.315632, 0, 9999, -9999, 1.0, 100, 1, 76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1108, 46.051867, 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1109, 0.188979, 0, 9999, -9999, 1.0, 100, 1, 0.77821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1110, 0.495025, 0, 9999, -9999, 1.0, 100, 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1111, 9.399663, 0, 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1112, 17.835069, 0, 9999, -9999, 1.0, 100, 1, 69.53429, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1113, 0.897166, 0, 9999, -9999, 1.0, 100, 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1114, 1.424338, 0, 9999, -9999, 1.0, 100, 1, 13.446889, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1115, 14.79839, 0, 9999, -9999, 1.0, 100, 1, 50.575278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1116, 8.410982, 0, 9999, -9999, 1.0, 100, 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1117, 22.537982, 0, 9999, -9999, 1.0, 100, 1, 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1118, 1.619728, 0, 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1119, 10.776742, 0, 9999, -9999, 1.0, 100, 1, 43.254023, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1120, 0.54849, 0, 9999, -9999, 1.0, 100, 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1121, 0.128871, 0, 9999, -9999, 1.0, 100, 1, 0.540589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1122, 0.355104, 0, 9999, -9999, 1.0, 100, 1, 1.462883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1123, 0.279445, 0, 9999, -9999, 1.0, 100, 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1124, 0.319102, 0, 9999, -9999, 1.0, 100, 1, 1.288283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1125, 4.97339, 0, 9999, -9999, 1.0, 100, 1, 25.818899, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1126, 5.785037, 0, 9999, -9999, 1.0, 100, 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1127, 36.406066, 0, 9999, -9999, 1.0, 100, 1, 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1128, 0.968241, 0, 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1129, 1.446507, 0, 9999, -9999, 1.0, 100, 1, 4.738747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1130, 0.288868, 0, 9999, -9999, 1.0, 100, 1, 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1131, 0.882575, 0, 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1132, 0.099503, 0, 9999, -9999, 1.0, 100, 1, 0.359497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1133, 0.171545, 0, 9999, -9999, 1.0, 100, 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1134, 0.12121, 0, 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1135, 1.390811, 0, 9999, -9999, 1.0, 100, 1, 8.117819, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1136, 0.092905, 0, 9999, -9999, 1.0, 100, 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1137, 0.470801, 0, 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1138, 0.251948, 0, 9999, -9999, 1.0, 100, 1, 1.254278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1139, 5.083469, 0, 9999, -9999, 1.0, 100, 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1140, 4.060139, 0, 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1141, 31.800974, 0, 9999, -9999, 1.0, 100, 1, 119.46456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1142, 0.257109, 0, 9999, -9999, 1.0, 100, 1, 1.215733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1143, 3.996627, 0, 9999, -9999, 1.0, 100, 1, 25.239356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1144, 15.96963, 0, 9999, -9999, 1.0, 100, 1, 52.527382, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1145, 124.051607, 0, 9999, -9999, 1.0, 100, 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1146, 0.20533, 0, 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1147, 12.694751, 0, 9999, -9999, 1.0, 100, 1, 45.703707, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1148, 6.218355, 0, 9999, -9999, 1.0, 100, 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1149, 1.909193, 0, 9999, -9999, 1.0, 100, 1, 8.556784, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1150, 0.742648, 0, 9999, -9999, 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1151, 4.837401, 0, 9999, -9999, 1.0, 100, 1, 13.036113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1152, 0.038213, 0, 9999, -9999, 1.0, 100, 1, 0.116518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1153, 0.013427, 0, 9999, -9999, 1.0, 100, 1, 0.068788, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1154, 0.031353, 0, 9999, -9999, 1.0, 100, 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1155, 0.221052, 0, 9999, -9999, 1.0, 100, 1, 0.609451, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1156, 4.092172, 0, 9999, -9999, 1.0, 100, 1, 16.022334, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1157, 1.612252, 0, 9999, -9999, 1.0, 100, 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1158, 0.260133, 0, 9999, -9999, 1.0, 100, 1, 1.04304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1159, 3.421268, 0, 9999, -9999, 1.0, 100, 1, 13.498087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1160, 92.70123, 0, 9999, -9999, 1.0, 100, 1, 238.377761, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1161, 1.447396, 0, 9999, -9999, 1.0, 100, 1, 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1162, 66.108496, 0, 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1163, 39.337378, 0, 9999, -9999, 1.0, 100, 1, 330.03194, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1164, 48.660165, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1165, 6.897139, 0, 9999, -9999, 1.0, 100, 1, 57.188579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1166, 39.149107, 0, 9999, -9999, 1.0, 100, 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1167, 1.266099, 0, 9999, -9999, 1.0, 100, 1, 5.05378, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1168, 0.406717, 0, 9999, -9999, 1.0, 100, 1, 1.345774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1169, 0.977901, 0, 9999, -9999, 1.0, 100, 1, 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1170, 0.065614, 0, 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1173, 54.517441, 0, 9999, -9999, 1.0, 100, 1, 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1174, 0.315409, 0, 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1175, 0.295585, 0, 9999, -9999, 1.0, 100, 1, 0.855454, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1176, 0.083542, 0, 9999, -9999, 1.0, 100, 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1177, 10.259509, 0, 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1178, 0.601094, 0, 9999, -9999, 1.0, 100, 1, 3.167999, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1179, 0.298531, 0, 9999, -9999, 1.0, 100, 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1180, 0.17058, 0, 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1181, 40.321989, 0, 9999, -9999, 1.0, 100, 1, 85.739557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1182, 51.507092, 0, 9999, -9999, 1.0, 100, 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1183, 5.009306, 0, 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1184, 0.734019, 0, 9999, -9999, 1.0, 100, 1, 4.219005, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1185, 2.912064, 0, 9999, -9999, 1.0, 100, 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1186, 11.368637, 0, 9999, -9999, 1.0, 100, 1, 38.916368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1187, 2.579245, 0, 9999, -9999, 1.0, 100, 1, 9.814574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1188, 63.736435, 0, 9999, -9999, 1.0, 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1189, 3.253623, 0, 9999, -9999, 1.0, 100, 1, 20.261805, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1190, 0.006209, 0, 9999, -9999, 1.0, 100, 1, 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1191, 0.0187, 0, 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1196, 91.192992, 0, 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1197, 45.154128, 0, 9999, -9999, 1.0, 100, 1, 90.592266, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1198, 10.781322, 0, 9999, -9999, 1.0, 100, 1, 39.819157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1199, 129.096066, 0, 9999, -9999, 1.0, 100, 1, 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1200, 40.048819, 0, 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1203, 0.000518, 0, 9999, -9999, 1.0, 100, 1, 182.623256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1204, 6.696653, 0, 9999, -9999, 1.0, 100, 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1211, 0.002325, 0, 9999, -9999, 1.0, 100, 1, 18.005229, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1212, 8.9e-05, 0, 9999, -9999, 1.0, 100, 1, 91.171888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1213, 0.973196, 0, 9999, -9999, 1.0, 100, 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1214, 3.4e-05, 0, 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1215, 0.054275, 0, 9999, -9999, 1.0, 100, 1, 2.252965, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1216, 1.253411, 0, 9999, -9999, 1.0, 100, 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1217, 0.149386, 0, 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1218, 0.003217, 0, 9999, -9999, 1.0, 100, 1, 0.980482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1219, 2.479626, 0, 9999, -9999, 1.0, 100, 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1220, 3.839093, 0, 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1221, 2.695126, 0, 9999, -9999, 1.0, 100, 1, 593.230436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1222, 7.161228, 0, 9999, -9999, 1.0, 100, 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1225, 1.255556, 0, 9999, -9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1226, 0.28775, 0, 9999, -9999, 1.0, 100, 1, 3.982858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1228, 0.001483, 0, 9999, -9999, 1.0, 100, 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1229, 18.31617, 0, 9999, -9999, 1.0, 100, 1, 51.244222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1230, 0.000325, 0, 9999, -9999, 1.0, 100, 1, 1.681276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1231, 1.923913, 0, 9999, -9999, 1.0, 100, 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1232, 4.63817, 0, 9999, -9999, 1.0, 100, 1, 75.075088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1233, 500.196383, 0, 9999, -9999, 1.0, 100, 1, 575.36828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1235, 8.984787, 0, 9999, -9999, 1.0, 100, 1, 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1236, 81.082668, 0, 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1237, 1.527562, 0, 9999, -9999, 1.0, 100, 1, 14.605409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1238, 4.660084, 0, 9999, -9999, 1.0, 100, 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1239, 1.665042, 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1240, 37.421408, 0, 9999, -9999, 1.0, 100, 1, 339.51051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1241, 18.885145, 0, 9999, -9999, 1.0, 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1242, 1.763094, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1243, 6.672629, 0, 9999, -9999, 1.0, 100, 1, 83.079842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1244, 170.415315, 0, 9999, -9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1245, 0.893274, 0, 9999, -9999, 1.0, 100, 1, 8.080896, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1246, 29.939855, 0, 9999, -9999, 1.0, 100, 1, 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1247, 0.002877, 0, 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1248, 35.172508, 0, 9999, -9999, 1.0, 100, 1, 91.958275, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1249, 6.814281, 0, 9999, -9999, 1.0, 100, 1, 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1250, 2.321024, 0, 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1251, 0.763016, 0, 9999, -9999, 1.0, 100, 1, 23.404345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1252, 0.866633, 0, 9999, -9999, 1.0, 100, 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1253, 10.241594, 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1254, 27.7169, 0, 9999, -9999, 1.0, 100, 1, 82.278695, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1255, 0.662735, 0, 9999, -9999, 1.0, 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1256, 2.741448, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1257, 16.932573, 0, 9999, -9999, 1.0, 100, 1, 88.95288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1258, 98.683252, 0, 9999, -9999, 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1259, 20.411221, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1260, 2.232628, 0, 9999, -9999, 1.0, 100, 1, 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1261, 1.62711, 0, 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1267, 1.762778, 0, 9999, -9999, 1.0, 100, 1, 39.469006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1274, 2.637835, 0, 9999, -9999, 1.0, 100, 1, 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1275, 9.287573, 0, 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1276, 1.874038, 0, 9999, -9999, 1.0, 100, 1, 25.655641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1277, 5.207053, 0, 9999, -9999, 1.0, 100, 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1278, 14.540024, 0, 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1282, 0.000138, 0, 9999, -9999, 1.0, 100, 1, 4.363037, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1283, 1273.446566, 0, 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1287, 5.392165, 0, 9999, -9999, 1.0, 100, 1, 93.199628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1288, 6.771115, 0, 9999, -9999, 1.0, 100, 1, 148.402692, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1289, 2.462386, 0, 9999, -9999, 1.0, 100, 1, 184.149235, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1290, 0.002137, 0, 9999, -9999, 1.0, 100, 1, 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1291, 8.652076, 0, 9999, -9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1292, 0.483448, 0, 9999, -9999, 1.0, 100, 1, 41.682074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1293, 0.20117, 0, 9999, -9999, 1.0, 100, 1, 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1294, 0.49885, 0, 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1295, 0.4326, 0, 9999, -9999, 1.0, 100, 1, 5.873666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1300, 1.338514, 0, 9999, -9999, 1.0, 100, 1, 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1301, 3.056681, 0, 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1302, 0.036072, 0, 9999, -9999, 1.0, 100, 1, 4.877299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1303, 0.000754, 0, 9999, -9999, 1.0, 100, 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1306, 0.392214, 0, 9999, -9999, 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1307, 0.070954, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1308, 0.276331, 0, 9999, -9999, 1.0, 100, 1, 3.278321, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1312, 195.684185, 0, 9999, -9999, 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1317, 7.32081, 0, 9999, -9999, 1.0, 100, 1, 23.958574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1319, 4.498958, 0, 9999, -9999, 1.0, 100, 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1323, 56.14053, 0, 9999, -9999, 1.0, 100, 1, 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1326, 10.182637, 0, 9999, -9999, 1.0, 100, 1, 56.928865, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1327, 10.455752, 0, 9999, -9999, 1.0, 100, 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1328, 4.010462, 0, 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1331, 0.080057, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1336, 1.009232, 0, 9999, -9999, 1.0, 100, 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1337, 91.485454, 0, 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1339, 2.005785, 0, 9999, -9999, 1.0, 100, 1, 10.086482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1340, 58.628541, 0, 9999, -9999, 1.0, 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1346, 32.686762, 0, 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1348, 12.081109, 0, 9999, -9999, 1.0, 100, 1, 22.707927, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1349, 28.392849, 0, 9999, -9999, 1.0, 100, 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1356, 9.77708, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1357, 7.611677, 0, 9999, -9999, 1.0, 100, 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1359, 4.046937, 0, 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1360, 4.206305, 0, 9999, -9999, 1.0, 100, 1, 17.135983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1361, 14.984474, 0, 9999, -9999, 1.0, 100, 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1362, 19.170113, 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1363, 0.001093, 0, 9999, -9999, 1.0, 100, 1, 0.036158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1364, 0.001363, 0, 9999, -9999, 1.0, 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1365, 2.2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1366, 0.102023, 0, 9999, -9999, 1.0, 100, 1, 1.229992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1372, 7.992948, 0, 9999, -9999, 1.0, 100, 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1373, 1.389051, 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1374, 71.380724, 0, 9999, -9999, 1.0, 100, 1, 108.220146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1375, 37.822803, 0, 9999, -9999, 1.0, 100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1376, 69.720118, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1377, 58.199323, 0, 9999, -9999, 1.0, 100, 1, 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1378, 45.859926, 0, 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1379, 0.14096, 0, 9999, -9999, 1.0, 100, 1, 0.805984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1380, 0.335839, 0, 9999, -9999, 1.0, 100, 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1381, 0.184209, 0, 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1382, 13.813542, 0, 9999, -9999, 1.0, 100, 1, 138.839906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1383, 21.399019, 0, 9999, -9999, 1.0, 100, 1, 109.821439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1384, 1.198594, 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1385, 0.024343, 0, 9999, -9999, 1.0, 100, 1, 0.124455, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1386, 0.167167, 0, 9999, -9999, 1.0, 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1387, 1.066413, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1388, 0.256908, 0, 9999, -9999, 1.0, 100, 1, 0.928188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1389, 0.059104, 0, 9999, -9999, 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1390, 1.139446, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1391, 0.133841, 0, 9999, -9999, 1.0, 100, 1, 0.521719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1392, 4.971503, 0, 9999, -9999, 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1393, 0.252203, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1394, 0.208445, 0, 9999, -9999, 1.0, 100, 1, 1.077886, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1395, 0.013642, 0, 9999, -9999, 1.0, 100, 1, 0.073776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1396, 0.00417, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1397, 8.076645, 0, 9999, -9999, 1.0, 100, 1, 25.084545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1398, 0.940882, 0, 9999, -9999, 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1399, 3.713776, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1400, 0.236741, 0, 9999, -9999, 1.0, 100, 1, 1.297197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1401, 18.486316, 0, 9999, -9999, 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1402, 6.374645, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1403, 46.195768, 0, 9999, -9999, 1.0, 100, 1, 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1404, 54.270904, 0, 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1405, 6.605109, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1406, 2.938927, 0, 9999, -9999, 1.0, 100, 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1407, 0.025825, 0, 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1408, 5.740956, 0, 9999, -9999, 1.0, 100, 1, 41.078698, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1409, 1.5687, 0, 9999, -9999, 1.0, 100, 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1410, 5.314918, 0, 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1411, 6.47493, 0, 9999, -9999, 1.0, 100, 1, 39.395367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1412, 0.032719, 0, 9999, -9999, 1.0, 100, 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1413, 0.023058, 0, 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1414, 8.5e-05, 0, 9999, -9999, 1.0, 100, 1, 25.992489, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1418, 13.628823, 0, 9999, -9999, 1.0, 100, 1, 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1419, 4.359201, 0, 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1421, 0.026178, 0, 9999, -9999, 0.999529, 100, 1, 6.972369, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1422, 0.02267, 0, 9999, -9999, 1.0, 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1423, 0.003041, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1424, 205.502248, 0, 9999, -9999, 1.0, 100, 1, 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1425, 7.089408, 0, 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1426, 12.895597, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1427, 51.532387, 0, 9999, -9999, 1.0, 100, 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1428, 32.967042, 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1431, 78.980532, 0, 9999, -9999, 1.0, 100, 1, 227.662022, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1432, 6.308304, 0, 9999, -9999, 1.0, 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1433, 1093.225383, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1434, 78.196858, 0, 9999, -9999, 1.0, 100, 1, 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1435, 62.720229, 0, 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1436, 53.522086, 0, 9999, -9999, 1.0, 100, 1, 98.434116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1437, 5.612357, 0, 9999, -9999, 1.0, 100, 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1438, 41.072054, 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1439, 31.289263, 0, 9999, -9999, 1.0, 100, 1, 99.103164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1440, 0.082585, 0, 9999, -9999, 1.0, 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1443, 58.478089, 0, 9999, -9999, 1.0, 100, 1, 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1446, 40.070237, 0, 9999, -9999, 1.0, 100, 1, 758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1447, 0.499762, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1448, 3.354244, 0, 9999, -9999, 1.0, 100, 1, 7.523578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1449, 36.4709, 0, 9999, -9999, 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1450, 21.193068, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1451, 27.993599, 0, 9999, -9999, 1.0, 100, 1, 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1452, 10.328116, 0, 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1453, 9.5e-05, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1454, 5.891023, 0, 9999, -9999, 1.0, 100, 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1455, 0.237412, 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1456, 19.998371, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1457, 0.55431, 0, 9999, -9999, 1.0, 100, 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1458, 0.068144, 0, 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1459, 0.968807, 0, 9999, -9999, 1.0, 100, 1, 5.309059, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1460, 5.997406, 0, 9999, -9999, 1.0, 100, 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1461, 4.61741, 0, 9999, -9999, 1.0, 100, 1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1462, 0.616784, 0, 9999, -9999, 1.0, 100, 1, 2.402686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1463, 0.168278, 0, 9999, -9999, 1.0, 100, 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1464, 8.306476, 0, 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1465, 1.336085, 0, 9999, -9999, 1.0, 100, 1, 5.299939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1466, 2.109579, 0, 9999, -9999, 1.0, 100, 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1467, 0.543693, 0, 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1468, 6.402705, 0, 9999, -9999, 1.0, 100, 1, 23.789171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1469, 9.990425, 0, 9999, -9999, 1.0, 100, 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1470, 49.691631, 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1471, 113.337953, 0, 9999, -9999, 1.0, 100, 1, 159.165074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1472, 2.427411, 0, 9999, -9999, 1.0, 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1473, 2.100139, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1474, 0.510586, 0, 9999, -9999, 1.0, 100, 1, 1.398948, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1475, 0.135061, 0, 9999, -9999, 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1476, 114.968527, 0, 9999, -9999, 1.0, 100, 1, 250.480113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1477, 2.305445, 0, 9999, -9999, 1.0, 100, 1, 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1482, 0.072524, 0, 9999, -9999, 1.0, 100, 1, 17.51083, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1483, 0.822639, 0, 9999, -9999, 1.0, 100, 1, 3.599649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1484, 0.00601, 0, 9999, -9999, 1.0, 100, 1, 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1485, 0.113245, 0, 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1486, 0.582622, 0, 9999, -9999, 1.0, 100, 1, 2.89934, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1489, 0.028354, 0, 9999, -9999, 1.0, 100, 1, 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1490, 675.613992, 0, 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1491, 6.722106, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1492, 10.405171, 0, 9999, -9999, 1.0, 100, 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1493, 4.588641, 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1494, 41.935608, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1495, 7.676729, 0, 9999, -9999, 1.0, 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1500, 0.008709, 0, 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1501, 0.00044, 0, 9999, -9999, 1.0, 100, 1, 8.165333, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1503, 0.000454, 0, 9999, -9999, 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1504, 0.077847, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1512, 0.001465, 0, 9999, -9999, 1.0, 100, 1, 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1513, 0.551953, 0, 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1518, 0.11666, 0, 9999, -9999, 1.0, 100, 1, 0.670542, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1519, 0.008097, 0, 9999, -9999, 1.0, 100, 1, 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
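# Annotation (added as a hedged note, assuming the standard PYPOWER/MATPOWER case
# conventions that the "ppc" dict and array() literals suggest): the table closed
# above appears to be the generator matrix, one 21-column row per unit:
#   bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin,
#   Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf
# The "branch" table defined below follows the usual 13-column layout:
#   fbus, tbus, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax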
ppc["branch"] = array([
[586, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[589, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[590, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[593, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[594, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[595, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[601, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[602, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[603, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[607, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[608, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[609, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[613, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[614, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[616, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[617, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[618, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[619, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[621, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[628, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[629, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[631, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[632, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[637, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[638, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[639, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[642, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[643, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[646, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[647, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[650, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[652, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[655, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[657, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[658, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[661, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[662, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[663, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[666, 180, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[668, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[670, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[672, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[676, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[677, 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[678, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[679, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[681, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[683, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[687, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[689, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[691, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[693, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[694, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[695, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[696, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[697, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[698, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[699, 213, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[700, 214, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[701, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[702, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[704, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[705, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[707, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[708, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[711, 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[713, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[714, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[716, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[717, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[719, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[721, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[722, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[723, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[724, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[725, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[726, 240, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[727, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[728, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[730, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[731, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[732, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[733, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[735, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[736, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[737, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[738, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[739, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[741, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[742, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[743, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[745, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[746, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[747, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[748, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[749, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[750, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[758, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[760, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[761, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[762, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[763, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[765, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[767, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[769, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[771, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[772, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[774, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[775, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[776, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[777, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[778, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[779, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[781, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[784, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[785, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[786, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[787, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[788, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[789, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[791, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[792, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[793, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[794, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[795, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[796, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[798, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[800, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[801, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[802, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[805, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[806, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[808, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[809, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[811, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[814, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[816, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[817, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[818, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[821, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[822, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[825, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[826, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[830, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[833, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[834, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[835, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[836, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[837, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[839, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[840, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[841, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[843, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[844, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[845, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[848, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[849, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[850, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[851, 575, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[852, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[853, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[855, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[856, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[857, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[858, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[859, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[860, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[862, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[863, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[864, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[865, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[866, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[867, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[869, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[870, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[872, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[873, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[874, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[875, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[876, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[877, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[881, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[882, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[883, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[885, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[886, 394, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[888, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[889, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[890, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[895, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[896, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[897, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[898, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[899, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[900, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[902, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[903, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[905, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[906, 414, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[907, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[909, 417, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[913, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[915, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[917, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[918, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[920, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[921, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[922, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[923, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[924, 433, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[925, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[928, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[931, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[934, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[935, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[936, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[937, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[939, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[940, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[942, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[944, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[945, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[948, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[950, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[952, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[956, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[957, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[958, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[959, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[960, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[963, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[965, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[966, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[967, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[968, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[969, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[971, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[972, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[973, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[975, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[976, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[977, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[978, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[981, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[982, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[983, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[984, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[985, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[986, 64, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[987, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[988, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[990, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[993, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[994, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[995, 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[996, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[997, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[998, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[999, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1000, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1002, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1003, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1007, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1008, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1010, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1011, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1012, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1018, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1019, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1023, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1025, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1026, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1027, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1028, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1029, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1030, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1031, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1032, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1033, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1034, 4, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1035, 6, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1036, 7, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1037, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1038, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1039, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1041, 16, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1042, 17, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1044, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1046, 25, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1047, 27, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1048, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1049, 29, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1050, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1051, 33, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1052, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1053, 35, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1054, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1055, 38, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1056, 39, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1057, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1058, 41, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1059, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1060, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1061, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1062, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1063, 48, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1064, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1065, 50, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1066, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1067, 53, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1072, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1073, 60, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1074, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1077, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1079, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1080, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1081, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1082, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1083, 73, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1084, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1085, 76, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1086, 77, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1087, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1088, 80, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1089, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1090, 82, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1091, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1092, 84, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1093, 85, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1094, 88, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1095, 89, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1096, 90, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1097, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1098, 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1099, 93, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1101, 98, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1102, 101, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1103, 102, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1104, 103, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1105, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1106, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1107, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1108, 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1109, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1110, 113, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1111, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1112, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1113, 116, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1114, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1115, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1116, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1117, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1118, 126, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1119, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1120, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1121, 131, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1122, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1123, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1124, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1125, 135, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1126, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1127, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1128, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1129, 140, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1130, 141, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1131, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1132, 144, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1133, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1134, 146, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1135, 147, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1136, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1137, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1138, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1139, 151, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1140, 152, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1141, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1142, 154, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1143, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1144, 158, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1145, 161, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1146, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1147, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1148, 164, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1149, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1150, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1151, 168, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1152, 169, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1153, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1154, 171, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1155, 172, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1156, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1157, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1158, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1159, 176, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1160, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1161, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1162, 179, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1163, 180, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1164, 181, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1165, 182, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1166, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1167, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1168, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1169, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1170, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1173, 192, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1174, 193, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1175, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1176, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1177, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1178, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1179, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1180, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1181, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1182, 203, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1183, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1184, 205, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1185, 206, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1186, 207, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1187, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1188, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1189, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1190, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1191, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1196, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1197, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1198, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1199, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1200, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1203, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1204, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1211, 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1212, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1213, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1214, 240, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1215, 241, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1216, 242, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1217, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1218, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1219, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1220, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1221, 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1222, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1225, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1226, 257, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1228, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1229, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1230, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1231, 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1232, 267, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1233, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1235, 271, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1236, 272, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1237, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1238, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1239, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1240, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1241, 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1242, 281, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1243, 282, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1244, 283, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1245, 284, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1246, 285, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1247, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1248, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1249, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1250, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1251, 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1252, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1253, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1254, 294, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1255, 295, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1256, 296, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1257, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1258, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1259, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1260, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1261, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1267, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1274, 321, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1275, 322, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1276, 323, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1277, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1278, 325, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1282, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1283, 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1287, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1288, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1289, 340, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1290, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1291, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1292, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1293, 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1294, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1295, 346, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1300, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1301, 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1302, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1303, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1306, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1307, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1308, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1312, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1317, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1319, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1323, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1326, 384, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1327, 385, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1328, 386, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1331, 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1336, 395, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1337, 396, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1339, 398, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1340, 399, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1346, 407, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1348, 410, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1349, 411, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1356, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1357, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1359, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1360, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1361, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1362, 425, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1363, 426, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1364, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1365, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1366, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1372, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1373, 436, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1374, 437, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1375, 438, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1376, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1377, 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1378, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1379, 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1380, 443, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1381, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1382, 446, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1383, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1384, 448, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1385, 449, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1386, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1387, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1388, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1389, 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1390, 455, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1391, 456, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1392, 457, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1393, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1394, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1395, 460, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1396, 461, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1397, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1398, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1399, 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1400, 465, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1401, 466, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1402, 467, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1403, 468, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1404, 469, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1405, 470, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1406, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1407, 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1408, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1409, 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1410, 475, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1411, 476, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1412, 477, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1413, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1414, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1418, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1419, 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1421, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1422, 487, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1423, 488, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1424, 489, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1425, 490, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1426, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1427, 492, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1428, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1431, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1432, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1433, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1434, 499, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1435, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1436, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1437, 502, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1438, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1439, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1440, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1443, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1446, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1447, 512, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1448, 513, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1449, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1450, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1451, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1452, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1453, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1454, 519, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1455, 520, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1456, 521, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1457, 522, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1458, 523, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1459, 524, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1460, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1461, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1462, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1463, 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1464, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1465, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1466, 531, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1467, 532, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1468, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1469, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1470, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1471, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1472, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1473, 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1474, 539, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1475, 540, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1476, 541, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1477, 542, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1482, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1483, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1484, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1485, 550, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1486, 551, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1489, 555, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1490, 556, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1491, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1492, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1493, 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1494, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1495, 561, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1500, 566, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1501, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1503, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1504, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1512, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1513, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1518, 584, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1519, 585, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1, 490, 0, 0.01433884297520661, 0.151691958358336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 43.375 ],
[3, 4, 0, 0.006291637811634348, 0.903417549506624, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 72.681 ],
[491, 6, 0, 0.011200661157024791, 0.118492839955776, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.882 ],
[7, 5, 0, 0.005794840720221606, 0.20802058859584005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.471 ],
[8, 9, 0, 0.0024379328254847646, 0.350063268897336, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 28.163 ],
[492, 11, 0, 0.018224793388429753, 0.0482004476327704, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.565 ],
[11, 493, 0, 0.030286942148760328, 0.08010209706571599, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.809 ],
[492, 493, 0, 0.04521652892561983, 0.11958747011094399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 68.39 ],
[494, 14, 0, 0.012990743801652892, 0.137430291356512, 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.297 ],
[13, 15, 0, 0.007681959833795014, 0.27576354266704156, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 44.371 ],
[16, 5, 0, 0.006275623268698061, 0.22527950450957998, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 36.248000000000005 ],
[17, 18, 0, 0.04623522622347646, 0.9335989000302801, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 200.291 ],
[17, 12, 0, 0.0056020313942728535, 0.113118303398186, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.268 ],
[14, 495, 0, 0.0017957024793388433, 0.018996904156819597, 991.0, 991.0, 991.0, 0, 1, 1, -360, 5.432 ],
[494, 19, 0, 0.010246611570247935, 0.10839986031771602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 30.996 ],
[20, 21, 0, 0.005415685595567867, 0.19440984828307922, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 31.281 ],
[20, 22, 0, 0.0049706544321329645, 0.713737278110032, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 57.42100000000001 ],
[497, 23, 0, 0.002190413223140496, 0.005793146490362, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.313 ],
[23, 499, 0, 0.020799669421487598, 0.22004164444829602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 62.919 ],
[25, 26, 0, 0.00141845567867036, 0.050919084651523595, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.193 ],
[25, 22, 0, 0.0035578254847645433, 0.0319293051869808, 856.0, 856.0, 856.0, 0, 1, 1, -360, 10.275 ],
[23, 27, 0, 0.027738181818181818, 0.073361203699828, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.95399999999999 ],
[28, 23, 0, 0.012841652892561981, 0.0339632611780132, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.423 ],
[8, 21, 0, 0.004948753462603878, 0.17764812836304802, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 28.584 ],
[9, 29, 0, 0.002212863573407202, 0.31774552934092004, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 25.563000000000002 ],
[30, 25, 0, 0.019958795013850415, 0.17911796401827998, 856.0, 856.0, 856.0, 0, 1, 1, -360, 57.641000000000005 ],
[31, 32, 0, 0.0299776084949446, 0.605319030583196, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 129.863 ],
[32, 33, 0, 0.016762234533725762, 0.33846927983213604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 72.61399999999999 ],
[34, 35, 0, 0.001931900826446281, 0.020437759184893597, 991.0, 991.0, 991.0, 0, 2, 1, -360, 5.843999999999999 ],
[35, 36, 0, 0.0008730578512396695, 0.0092361605077588, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.641 ],
[490, 6, 0, 0.049352066115702475, 0.130525028606764, 495.0, 495.0, 495.0, 0, 1, 1, -360, 74.645 ],
[37, 10, 0, 0.02404639889196676, 0.485553838251812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 104.169 ],
[10, 38, 0, 0.006848799630657894, 0.13829351176534158, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.669 ],
[37, 38, 0, 0.01437834718372576, 1.1613317560186958, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 124.574 ],
[39, 40, 0, 0.04521629732222991, 0.913024308337812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 195.877 ],
[39, 41, 0, 0.017466989843005543, 0.35269996139852006, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 75.667 ],
[42, 41, 0, 0.031145429362880884, 0.6289001042979919, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 134.922 ],
[18, 42, 0, 0.03439750692520776, 0.6945672650962679, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 149.01 ],
[492, 43, 0, 0.01819173553719008, 0.192452068436848, 991.0, 991.0, 991.0, 0, 2, 1, -360, 55.03 ],
[44, 45, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.755 ],
[44, 505, 0, 0.006061487603305785, 0.0160312607980052, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.168 ],
[46, 12, 0, 0.0014741170360110802, 0.2116687641962416, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.029 ],
[47, 48, 0, 0.005344182825484765, 0.01199019212302604, 428.0, 428.0, 428.0, 0, 1, 1, -360, 7.7170000000000005 ],
[49, 50, 0, 0.0019151662049861494, 0.0171874439892256, 856.0, 856.0, 856.0, 0, 1, 1, -360, 5.531000000000001 ],
[31, 33, 0, 0.013475992613088641, 0.27211225959163604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 58.378 ],
[31, 51, 0, 0.003518611495844875, 0.5052381383693519, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.647 ],
[52, 53, 0, 0.010464421745152355, 1.5025884408875438, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 120.885 ],
[52, 54, 0, 0.0076126500461911354, 0.1537174637168, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 32.978 ],
[506, 55, 0, 0.012634380165289257, 0.133660287181212, 991.0, 991.0, 991.0, 0, 1, 1, -360, 38.219 ],
[506, 507, 0, 0.044157355371900825, 0.11678619613628, 495.0, 495.0, 495.0, 0, 1, 1, -360, 66.788 ],
[57, 506, 0, 0.004687272727272727, 0.049587095736244, 991.0, 991.0, 991.0, 0, 1, 1, -360, 14.179 ],
[57, 58, 0, 0.014436363636363634, 0.0381809096340232, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.835 ],
[58, 506, 0, 0.019797685950413223, 0.052360391943288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.944000000000003 ],
[59, 60, 0, 0.019407548476454296, 0.174170863885556, 856.0, 856.0, 856.0, 0, 1, 1, -360, 56.049 ],
[508, 62, 0, 0.051111404958677685, 0.03379452026753001, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.653 ],
[30, 61, 0, 0.03143698060941828, 0.28212765137935203, 856.0, 856.0, 856.0, 0, 1, 1, -360, 90.79 ],
[63, 506, 0, 0.027457190082644623, 0.072618044249872, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.528999999999996 ],
[13, 64, 0, 0.0014816481994459833, 0.2127501654814608, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.116 ],
[65, 66, 0, 0.03778185595567867, 0.7629053006222161, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 163.671 ],
[59, 67, 0, 0.0051880193905817175, 0.046559297286324804, 856.0, 856.0, 856.0, 0, 1, 1, -360, 14.982999999999999 ],
[61, 67, 0, 0.012931440443213295, 0.1160517597580644, 856.0, 856.0, 856.0, 0, 1, 1, -360, 37.346 ],
[68, 69, 0, 0.011149584487534626, 0.4002427745096039, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 64.4 ],
[70, 69, 0, 0.009625346260387812, 0.345526355460808, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.596000000000004 ],
[71, 72, 0, 0.008878635734072021, 0.318721276477736, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.283 ],
[73, 74, 0, 0.012529547553116345, 0.253001288604392, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 54.278 ],
[37, 75, 0, 0.027459141274238225, 0.5544652029066119, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 118.95299999999999 ],
[72, 75, 0, 0.006688711911357341, 0.240108375006292, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 38.634 ],
[37, 72, 0, 0.036222068328739615, 0.7314094881920841, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 156.914 ],
[76, 77, 0, 0.004683777700831025, 0.6725445900750401, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 54.107 ],
[77, 51, 0, 0.00363183864265928, 0.5214964473447999, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 41.955 ],
[73, 72, 0, 0.025475069252077563, 0.514402082018968, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 110.35799999999999 ],
[18, 40, 0, 0.01302770083102493, 0.26306018504072, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.43600000000001 ],
[492, 45, 0, 0.0308703030303719, 0.18370114733484796, 743.0, 743.0, 743.0, 0, 1, 1, -360, 70.03699999999999 ],
[10, 74, 0, 0.030167359187465374, 0.609150547206812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 130.685 ],
[45, 511, 0, 0.08203371900826446, 0.05424014819960001, 248.0, 248.0, 248.0, 0, 1, 1, -360, 62.038000000000004 ],
[78, 32, 0, 0.013458795013850415, 0.48313777647302397, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 77.738 ],
[79, 80, 0, 0.0038086911357340715, 0.1367226831743568, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 21.999000000000002 ],
[81, 79, 0, 0.010767832409972299, 0.3865388099484561, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 62.195 ],
[34, 82, 0, 0.0015497520661157025, 0.00409874294399768, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.344 ],
[83, 84, 0, 0.00902611570247934, 0.0238720301499152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.652000000000001 ],
[83, 499, 0, 0.04179570247933885, 0.0276350398834796, 248.0, 248.0, 248.0, 0, 1, 1, -360, 31.608 ],
[85, 86, 0, 0.00802354570637119, 0.28802563884886, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 46.343999999999994 ],
[87, 86, 0, 0.01904968836565097, 0.683837154069184, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 110.031 ],
[88, 89, 0, 0.00380297520661157, 0.010058007429140002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.752000000000001 ],
[90, 86, 0, 0.012097818559556786, 0.434282055192244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 69.877 ],
[91, 86, 0, 9.26246537396122e-05, 0.013299992817559201, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.07 ],
[86, 92, 0, 0.0001852493074792244, 0.0066499964087796005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.07 ],
[86, 93, 0, 0.008152181440443215, 0.292643346635492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 47.086999999999996 ],
[94, 86, 0, 0.012883829639889197, 0.46249792780547194, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 74.417 ],
[86, 95, 0, 0.010421052631578947, 0.37409026526870803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 60.192 ],
[513, 517, 0, 0.0008733884297520661, 0.0023099144321748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.321 ],
[97, 66, 0, 0.03812777008310249, 0.34217338998058805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 110.113 ],
[42, 98, 0, 0.003091759002770083, 0.44394630230884, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 35.716 ],
[99, 100, 0, 0.016371537396121884, 0.587698093837988, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 94.56200000000001 ],
[42, 101, 0, 0.008165339335180054, 0.29311568282888, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 47.163000000000004 ],
[102, 42, 0, 0.012403047091412742, 0.44523901189173193, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 71.64 ],
[103, 87, 0, 0.007073060941828254, 0.25390556381756, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 40.854 ],
[104, 103, 0, 0.0028852146814404432, 0.1035721403291428, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.665 ],
[105, 87, 0, 0.006406682825484765, 0.22998422159488002, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.005 ],
[106, 107, 0, 0.005714219759923823, 0.11538365264216799, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.754 ],
[108, 107, 0, 0.0025427631578947367, 0.09127896939786201, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.687000000000001 ],
[109, 106, 0, 0.003030470914127424, 0.10878648330773438, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.504 ],
[110, 111, 0, 0.019821849030470913, 0.7115558306889919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.491 ],
[87, 112, 0, 0.006135907202216068, 0.220264039928212, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.441 ],
[113, 87, 0, 0.003981648199445983, 0.14293141813921081, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.998 ],
[87, 85, 0, 0.011046225761772853, 0.3965324494097, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 63.803000000000004 ],
[110, 114, 0, 0.011665339335180056, 0.418757110306188, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 67.37899999999999 ],
[115, 116, 0, 0.007048925619834712, 0.07457124214588401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 21.323 ],
[117, 118, 0, 0.005987534626038782, 0.21493782785077598, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.584 ],
[117, 119, 0, 0.0038738746537396117, 0.5562504472696961, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 44.751000000000005 ],
[117, 120, 0, 0.005886686288088643, 0.8452704781039522, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 68.003 ],
[121, 122, 0, 0.0021170360110803325, 0.0759964075574972, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.228 ],
[123, 124, 0, 0.0018386426592797783, 0.0660027680945204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.62 ],
[125, 126, 0, 0.004941135734072022, 0.17737467056702802, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.54 ],
[127, 119, 0, 0.0029027008310249305, 0.1041998502705648, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.766 ],
[118, 128, 0, 0.007397160664819945, 0.265539950057812, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.726000000000006 ],
[121, 119, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.743 ],
[530, 527, 0, 0.022726611570247933, 0.060106736329903994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.374 ],
[125, 130, 0, 0.002931440443213297, 0.105231531956442, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.932000000000002 ],
[125, 123, 0, 0.0019078081717451524, 0.2739425623421336, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 22.039 ],
[131, 132, 0, 0.0035744459833795014, 0.12831385593973843, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.646 ],
[133, 123, 0, 0.003864439058171745, 0.13872389704704202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.320999999999998 ],
[524, 134, 0, 0.008092231404958678, 0.08560847143881999, 991.0, 991.0, 991.0, 0, 1, 1, -360, 24.479 ],
[135, 136, 0, 0.005242901662049862, 0.1882073282678, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.283 ],
[123, 131, 0, 0.003138331024930748, 0.1126583971045252, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.127 ],
[117, 128, 0, 0.010800034626038782, 0.38769479063117196, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 62.381 ],
[137, 521, 0, 0.013832396694214875, 0.14633421587532003, 991.0, 991.0, 991.0, 0, 2, 1, -360, 41.843 ],
[531, 514, 0, 0.0059504132231404955, 0.035409362037522, 743.0, 743.0, 743.0, 0, 1, 1, -360, 13.5 ],
[139, 521, 0, 0.021257520661157023, 0.05622132386323199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.152 ],
[140, 514, 0, 0.018527603305785127, 0.04900131122836401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.023000000000003 ],
[522, 141, 0, 0.012168595041322314, 0.032183175718526795, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.405 ],
[142, 523, 0, 0.007060165289256198, 0.0746901476577608, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.357 ],
[530, 526, 0, 0.020281652892561983, 0.053640374808152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.676 ],
[140, 532, 0, 0.004669090909090909, 0.0123486871461184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.062 ],
[142, 144, 0, 0.006678126721756199, 0.0397397958689204, 743.0, 743.0, 743.0, 0, 1, 1, -360, 15.151 ],
[140, 522, 0, 0.020450247933884298, 0.05408627047793199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.930999999999997 ],
[145, 146, 0, 0.028527603305785125, 0.07544904460236, 495.0, 495.0, 495.0, 0, 1, 1, -360, 43.148 ],
[147, 523, 0, 0.02461289256198347, 0.0650955220034416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 37.227 ],
[144, 523, 0, 0.008479338842975206, 0.0224259292904064, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.825 ],
[139, 523, 0, 0.029245619834710742, 0.0193370088934308, 248.0, 248.0, 248.0, 0, 1, 1, -360, 22.116999999999997 ],
[140, 141, 0, 0.008362975206611572, 0.022118173847506, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.649000000000001 ],
[528, 526, 0, 0.015389090909090908, 0.0407006573227188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.276 ],
[528, 148, 0, 0.014306115702479338, 0.0378364333712244, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.638 ],
[149, 150, 0, 0.013604628099173552, 0.035981157661543604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.576999999999998 ],
[145, 528, 0, 0.00320595041322314, 0.0084790121737992, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.849 ],
[530, 151, 0, 0.013144462809917355, 0.0347641247737036, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.881 ],
[524, 152, 0, 0.014598347107438016, 0.03860931919944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.08 ],
[149, 525, 0, 0.016897190082644627, 0.17875695122823998, 991.0, 991.0, 991.0, 0, 2, 1, -360, 51.114 ],
[139, 514, 0, 0.007824132231404959, 0.020693056313687997, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.834000000000001 ],
[126, 120, 0, 0.012780297783933518, 0.458781387757004, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.819 ],
[530, 153, 0, 0.02254545454545455, 0.059627617060924, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.1 ],
[528, 147, 0, 0.15786710743801652, 0.104380679149868, 248.0, 248.0, 248.0, 0, 1, 1, -360, 119.387 ],
[528, 154, 0, 0.006528264462809917, 0.017265779790547203, 495.0, 495.0, 495.0, 0, 2, 1, -360, 9.874 ],
[130, 120, 0, 0.01450502077562327, 0.5206947188067639, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 83.781 ],
[528, 155, 0, 0.16064132231404957, 0.1062149715341, 248.0, 248.0, 248.0, 0, 1, 1, -360, 121.485 ],
[524, 533, 0, 0.004432727272727273, 0.0468942356109744, 991.0, 991.0, 991.0, 0, 1, 1, -360, 13.409 ],
[524, 149, 0, 0.0056413223140495865, 0.05968007537478799, 991.0, 991.0, 991.0, 0, 2, 1, -360, 17.065 ],
[154, 150, 0, 0.007539173553719007, 0.0199394052006688, 495.0, 495.0, 495.0, 0, 2, 1, -360, 11.402999999999999 ],
[157, 110, 0, 0.009962084487534625, 0.357614433044424, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 57.541000000000004 ],
[119, 158, 0, 0.0002490189289012004, 0.08045252664623159, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 4.315 ],
[159, 60, 0, 0.010967451523545706, 0.0984261617997728, 856.0, 856.0, 856.0, 0, 1, 1, -360, 31.674 ],
[536, 161, 0, 0.021314380165289255, 0.056371704363524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.238 ],
[115, 151, 0, 0.00379404958677686, 0.0401376047510724, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.477 ],
[162, 134, 0, 0.0015910743801652895, 0.016832124393744, 991.0, 991.0, 991.0, 0, 2, 1, -360, 4.813 ],
[115, 526, 0, 0.0037884297520661154, 0.010019537998747198, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.73 ],
[138, 87, 0, 0.0011838642659279777, 0.16999131006813442, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 13.675999999999998 ],
[123, 163, 0, 0.0022778739612188364, 0.08177009602828919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.157 ],
[112, 164, 0, 0.0008672957063711912, 0.12453516639176802, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.019 ],
[112, 165, 0, 0.005989439058171744, 0.21500619230086396, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.595 ],
[166, 165, 0, 0.002632790858725762, 0.09451074335350361, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.207 ],
[167, 537, 0, 0.00832595041322314, 0.08808100664460242, 991.0, 991.0, 991.0, 0, 2, 1, -360, 25.186 ],
[168, 104, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.743 ],
[531, 520, 0, 0.016156694214876033, 0.042730794079516396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.436999999999998 ],
[139, 520, 0, 0.010682314049586776, 0.0282522993797748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.157 ],
[520, 169, 0, 0.0011328925619834712, 0.0119849761681232, 991.0, 991.0, 991.0, 0, 2, 1, -360, 3.427 ],
[168, 105, 0, 0.007340893351800554, 0.26352009133553606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.401 ],
[520, 170, 0, 0.005842644628099174, 0.015452470732151198, 495.0, 495.0, 495.0, 0, 2, 1, -360, 8.837 ],
[171, 89, 0, 0.005505454545454546, 0.058242717567848004, 991.0, 991.0, 991.0, 0, 1, 1, -360, 16.654 ],
[521, 172, 0, 0.006304793388429752, 0.06669899780522001, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.072 ],
[123, 173, 0, 0.005247403047091413, 0.18836891696656402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.309 ],
[521, 174, 0, 0.013300495867768597, 0.035176796844864404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.117 ],
[37, 39, 0, 0.004338873499549862, 0.35044859579205606, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 37.592 ],
[530, 175, 0, 0.013128595041322313, 0.0347221581224188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.857 ],
[530, 176, 0, 0.005685289256198347, 0.01503630144005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.599 ],
[88, 530, 0, 0.006015867768595041, 0.0159106066755372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.099 ],
[177, 496, 0, 0.018632066115702478, 0.19711036673178398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 56.361999999999995 ],
[178, 525, 0, 0.03106842975206612, 0.08216895464241199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.99100000000001 ],
[179, 493, 0, 0.057079669421487594, 0.15096278779194802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.333 ],
[180, 181, 0, 0.041027438016528923, 0.10850827416682, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.053999999999995 ],
[182, 180, 0, 0.00866314049586777, 0.09164817200545601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 26.206 ],
[179, 181, 0, 0.01957223140495868, 0.051764115772731996, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.603 ],
[180, 493, 0, 0.06676561983471074, 0.17657993119175203, 495.0, 495.0, 495.0, 0, 1, 1, -360, 100.98299999999999 ],
[183, 30, 0, 0.0024804362880886427, 0.356166349712776, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 28.654 ],
[183, 21, 0, 0.0025647506925207757, 0.36827307214930394, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.628 ],
[538, 185, 0, 0.018631404958677687, 0.0123189607681008, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.09 ],
[538, 89, 0, 0.014509752066115702, 0.038375005396288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.945999999999998 ],
[184, 186, 0, 0.0016554709141274237, 0.059427351084826, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.562000000000001 ],
[184, 187, 0, 0.002698753462603878, 0.09687863927102919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.588 ],
[520, 172, 0, 0.0034188429752066113, 0.0361682589818792, 991.0, 991.0, 991.0, 0, 2, 1, -360, 10.342 ],
[89, 175, 0, 0.0037309090909090903, 0.0098674088877672, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.643 ],
[185, 89, 0, 0.005812892561983471, 0.0153737832609196, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.792 ],
[89, 188, 0, 0.003108760330578513, 0.008221966434607202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.702 ],
[189, 190, 0, 0.008599492151454294, 0.17364414688031998, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.253 ],
[539, 172, 0, 0.0021570247933884296, 0.022819366646419197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 6.525 ],
[504, 192, 0, 0.0003084297520661157, 0.00326290713886456, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.9329999999999999 ],
[105, 186, 0, 0.003273372576177285, 0.1175060580379876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.907 ],
[105, 187, 0, 0.0021712257617728533, 0.0779416868808324, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.540999999999999 ],
[539, 193, 0, 0.005608595041322314, 0.01483346262541, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.482999999999999 ],
[187, 194, 0, 4.8649584487534626e-05, 0.0069856037041576, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.562 ],
[539, 540, 0, 0.004394710743801653, 0.0116230138006708, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.647 ],
[539, 196, 0, 0.00332297520661157, 0.008788516227194, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.026 ],
[197, 540, 0, 0.004737190082644629, 0.012528794024621601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.165 ],
[110, 198, 0, 0.00018724030470914128, 0.02688587333118328, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.1630000000000003 ],
[197, 539, 0, 0.009172231404958677, 0.024258473063998802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.873 ],
[199, 537, 0, 0.03612826446280991, 0.0238877676441712, 248.0, 248.0, 248.0, 0, 1, 1, -360, 27.322 ],
[134, 526, 0, 0.007771239669421488, 0.020553167475975197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.754000000000001 ],
[200, 193, 0, 0.0009322314049586776, 0.009862163056380801, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.82 ],
[4, 201, 0, 0.013726108033240996, 0.49273365914097605, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 79.282 ],
[202, 86, 0, 0.00013365650969529087, 0.00479794133417816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.772 ],
[85, 203, 0, 0.0019011426592797783, 0.2729854600553416, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 21.962 ],
[147, 204, 0, 0.0073874380165289254, 0.0781523963903056, 991.0, 991.0, 991.0, 0, 2, 1, -360, 22.346999999999998 ],
[147, 205, 0, 0.005959669421487603, 0.00394049369636956, 248.0, 248.0, 248.0, 0, 1, 1, -360, 4.507 ],
[123, 206, 0, 0.0005753116343490305, 0.0826091142668064, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 6.646 ],
[537, 207, 0, 0.018456198347107437, 0.048812461297776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.915 ],
[165, 208, 0, 0.00414612188365651, 0.14883562055771601, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.948 ],
[4, 94, 0, 0.013687673130193905, 0.49135394025941603, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 79.06 ],
[4, 2, 0, 5.2054478301015697e-05, 0.016817654469309, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 0.902 ],
[209, 4, 0, 0.0022369286703601107, 0.32120104149338397, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 25.840999999999998 ],
[119, 163, 0, 0.003535145429362881, 0.12690306230914922, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.419 ],
[210, 3, 0, 0.0003150969529085873, 0.011311208844832242, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.82 ],
[99, 211, 0, 0.0035045013850415513, 0.1258030161741948, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.242 ],
[99, 69, 0, 0.021717970914127423, 0.7796219621557, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 125.443 ],
[212, 99, 0, 0.008453774238227147, 0.30346978938770003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 48.82899999999999 ],
[213, 214, 0, 0.01490115702479339, 0.15764073118032798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 45.076 ],
[510, 215, 0, 0.002174710743801653, 0.09202587186721281, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 13.157 ],
[128, 69, 0, 0.010711651662049862, 1.538088234801848, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 123.741 ],
[216, 69, 0, 0.009628462603878117, 1.3825528982351443, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 111.228 ],
[217, 98, 0, 0.0012787396121883656, 0.045903620070299994, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 7.386 ],
[504, 218, 0, 0.027480991735537193, 0.072680994226412, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.565 ],
[177, 504, 0, 0.07054809917355372, 0.18658373169634002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 106.704 ],
[219, 209, 0, 0.003938798476454294, 0.5655728721401839, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 45.501000000000005 ],
[219, 220, 0, 0.0013026315789473684, 0.1870451326342096, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 15.048 ],
[94, 95, 0, 0.01070740997229917, 0.38436979242743197, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 61.846000000000004 ],
[159, 221, 0, 0.009937153739612188, 0.356719480257712, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 57.397 ],
[34, 161, 0, 0.010965289256198347, 0.116002818645824, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.17 ],
[222, 221, 0, 0.0046457756232686975, 0.16677196601221997, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.834 ],
[211, 52, 0, 0.05267313019390582, 0.472709090515552, 856.0, 856.0, 856.0, 0, 1, 1, -360, 152.12 ],
[215, 223, 0, 0.04873190082644628, 0.128884831985184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.707 ],
[224, 215, 0, 0.019086280991735535, 0.050478887076288004, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.868000000000002 ],
[225, 224, 0, 0.04200925619834711, 0.11110496071615601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 63.538999999999994 ],
[224, 223, 0, 0.031061818181818183, 0.082151468537468, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.981 ],
[226, 6, 0, 0.06420099173553719, 0.0424492677936932, 248.0, 248.0, 248.0, 0, 1, 1, -360, 48.552 ],
[7, 3, 0, 0.009332929362880887, 0.335029305054692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 53.907 ],
[216, 227, 0, 0.01989941135734072, 0.7143401282507, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.939 ],
[228, 229, 0, 0.010545454545454545, 0.027890337012274, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.95 ],
[227, 230, 0, 0.003993074792243767, 0.573366419334696, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 46.128 ],
[231, 53, 0, 0.007193213296398893, 1.0328749562310842, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 83.096 ],
[544, 545, 0, 0.013061818181818181, 0.034545548464856, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.756 ],
[234, 235, 0, 0.04608859504132231, 0.121893887321888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 69.709 ],
[546, 214, 0, 0.057025454545454546, 0.15081940173295602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.251 ],
[233, 227, 0, 0.0029001038781163438, 0.1041066260218888, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.750999999999998 ],
[237, 238, 0, 0.026324628099173554, 0.06962267451304, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.816 ],
[212, 100, 0, 0.007955505540166205, 0.285583163531816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 45.951 ],
[519, 239, 0, 0.01740429752066116, 0.046030422038308406, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.324 ],
[238, 519, 0, 0.015166280991735538, 0.040111375593995205, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.939 ],
[213, 240, 0, 0.01665388429752066, 0.04404574915373599, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 25.189 ],
[241, 242, 0, 0.009862015235457064, 0.3540221919932281, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 56.963 ],
[70, 241, 0, 0.003819858033240997, 0.5484941897752321, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 44.126999999999995 ],
[509, 213, 0, 0.011363636363636364, 0.120216969880216, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.375 ],
[68, 243, 0, 0.003611668975069252, 0.1296500701715312, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.861 ],
[243, 244, 0, 0.0007699099722991691, 0.027637882270859202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.447 ],
[68, 244, 0, 0.004104051246537396, 0.147325387728876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.705 ],
[544, 547, 0, 0.02418776859504132, 0.255884661882476, 991.0, 991.0, 991.0, 0, 1, 1, -360, 73.168 ],
[245, 227, 0, 0.012676419667590028, 0.45505241780707606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.219 ],
[246, 208, 0, 0.0010155817174515235, 0.0364568961999408, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.8660000000000005 ],
[112, 208, 0, 0.0017927631578947367, 0.0643558063672372, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.355 ],
[165, 247, 0, 0.0002113919667590028, 0.0075884538459086, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.2209999999999999 ],
[537, 549, 0, 0.00032066115702479337, 0.00084807607842936, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.485 ],
[537, 550, 0, 0.00032198347107438016, 0.0008515732993697601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.48700000000000004 ],
[537, 551, 0, 0.0002651239669421488, 0.0007011927988648, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.401 ],
[110, 251, 0, 0.00023857340720221602, 0.008564200982522441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.3780000000000001 ],
[510, 252, 0, 0.08467702479338843, 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, 64.03699999999999 ],
[529, 253, 0, 0.04859504132231405, 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5 ],
[237, 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 50.055 ],
[254, 238, 0, 0.07815008264462811, 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, 59.101000000000006 ],
[69, 255, 0, 0.0009369806094182826, 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.824000000000002 ],
[510, 225, 0, 0.021953719008264466, 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41 ],
[256, 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.315 ],
[258, 190, 0, 0.011717451523545707, 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84 ],
[258, 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.58 ],
[260, 261, 0, 0.006791031855955679, 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45 ],
[554, 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, 0, 1, 1, -360, 132.974 ],
[515, 263, 0, 0.006987107438016529, 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136 ],
[14, 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, 1, 1, -360, 51.446000000000005 ],
[116, 555, 0, 0.0009768595041322315, 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955 ],
[151, 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.958 ],
[111, 114, 0, 0.008806613573407202, 0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867 ],
[77, 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 33.321999999999996 ],
[266, 525, 0, 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.774000000000001 ],
[267, 120, 0, 0.013136945983379503, 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 75.87899999999999 ],
[268, 269, 0, 0.0010327272727272726, 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.5619999999999998 ],
[556, 271, 0, 0.052289586776859506, 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, 39.544000000000004 ],
[556, 272, 0, 0.04685355371900827, 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433 ],
[529, 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.234 ],
[128, 274, 0, 0.0029350761772853184, 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953 ],
[34, 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0, 0, 1, 1, -360, 0.627 ],
[503, 276, 0, 0.006707438016528925, 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29 ],
[503, 504, 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1, -360, 194.59 ],
[177, 218, 0, 0.04330380165289256, 0.114528740018308, 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497 ],
[277, 278, 0, 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 83.072 ],
[557, 558, 0, 0.04341289256198347, 0.258338836678648, 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493 ],
[557, 559, 0, 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 51.665 ],
[559, 558, 0, 0.04474314049586777, 0.11833546501370001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999 ],
[277, 78, 0, 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 103.557 ],
[277, 279, 0, 0.021390927977839334, 0.191970480441328, 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777 ],
[78, 279, 0, 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.665 ],
[281, 282, 0, 0.0023178670360110803, 0.08320574945862161, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388 ],
[283, 161, 0, 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1, -360, 55.571000000000005 ],
[268, 161, 0, 0.018883636363636366, 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 57.123000000000005 ],
[256, 284, 0, 0.010755371900826446, 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535 ],
[515, 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.576 ],
[263, 516, 0, 0.0030355371900826445, 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365 ],
[516, 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.449000000000002 ],
[63, 286, 0, 0.019088925619834708, 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872 ],
[287, 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.105 ],
[8, 102, 0, 0.015100069252077563, 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 87.21799999999999 ],
[8, 101, 0, 0.019246883656509697, 0.69091598202144, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17 ],
[80, 288, 0, 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 46.11600000000001 ],
[80, 289, 0, 0.0003782317636201524, 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, 6.553999999999999 ],
[276, 560, 0, 0.01778314049586777, 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897 ],
[37, 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 48.773999999999994 ],
[290, 74, 0, 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 179.483 ],
[512, 291, 0, 0.0053299173553719, 0.056385693247479204, 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123 ],
[78, 292, 0, 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 50.381 ],
[199, 548, 0, 0.0015530578512396695, 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349 ],
[491, 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0, 0, 1, 1, -360, 10.720999999999998 ],
[4, 294, 0, 9.669321329639889e-05, 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117 ],
[490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, 495.0, 0, 1, 1, -360, 76.503 ],
[491, 295, 0, 0.010613553719008264, 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053 ],
[491, 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.656000000000001 ],
[295, 297, 0, 0.020297520661157024, 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7 ],
[508, 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.15 ],
[117, 123, 0, 0.005876211911357341, 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941 ],
[133, 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 12.907 ],
[71, 74, 0, 0.03904524469065097, 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144 ],
[74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 89.09200000000001 ],
[298, 515, 0, 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.823 ],
[5, 299, 0, 0.0016232686980609415, 0.058271370400665996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376 ],
[32, 292, 0, 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.908 ],
[5, 29, 0, 0.00743395083102493, 1.0674425076571843, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001 ],
[503, 560, 0, 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, -360, 45.8 ],
[300, 301, 0, 0.004892053324099723, 0.7024509290644521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005 ],
[51, 300, 0, 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 29.729 ],
[244, 302, 0, 0.007714508310249307, 1.107727813004004, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118 ],
[31, 302, 0, 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.472 ],
[51, 282, 0, 0.006288434903047093, 0.9029576432132521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999 ],
[303, 304, 0, 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.254 ],
[305, 304, 0, 0.003881117266849031, 0.0783689646873844, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813 ],
[305, 259, 0, 0.0025625, 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.601999999999997 ],
[306, 307, 0, 0.03223268698060942, 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088 ],
[305, 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.01 ],
[305, 309, 0, 0.011014773776523545, 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716 ],
[310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.253 ],
[306, 309, 0, 0.035333795013850415, 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044 ],
[311, 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.833 ],
[280, 278, 0, 0.009749769159764544, 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 84.47200000000001 ],
[311, 32, 0, 0.01205909510619806, 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48 ],
[13, 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.049 ],
[313, 314, 0, 0.006092624653739613, 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191 ],
[312, 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.635 ],
[547, 566, 0, 0.027035702479338848, 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783 ],
[245, 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.803 ],
[312, 316, 0, 8.803670360110802e-05, 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.0170000000000001 ],
[312, 314, 0, 0.005339854570637119, 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.843000000000004 ],
[554, 546, 0, 0.08174743801652892, 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 123.64299999999999 ],
[262, 216, 0, 0.042641966759002774, 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15 ],
[317, 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.464000000000002 ],
[318, 317, 0, 0.008311634349030471, 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006 ],
[231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 203.683 ],
[319, 567, 0, 0.006089586776859504, 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421 ],
[557, 321, 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2, 1, -360, 30.264 ],
[277, 65, 0, 0.009430170821779778, 0.7616700793261759, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703 ],
[322, 288, 0, 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.706 ],
[322, 323, 0, 0.0018503000923372577, 0.14944779312484, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031 ],
[277, 324, 0, 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 85.425 ],
[324, 325, 0, 0.01103508771932133, 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.803999999999995 ],
[277, 325, 0, 0.008665743305609418, 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54 ],
[326, 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.577 ],
[328, 326, 0, 0.10300958677685952, 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 77.90100000000001 ],
[328, 327, 0, 0.09827173553719008, 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318 ],
[326, 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.443999999999996 ],
[568, 329, 0, 0.05699900826446282, 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211 ],
[568, 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.681999999999995 ],
[332, 78, 0, 0.006471029547541551, 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065 ],
[333, 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 49.559 ],
[332, 333, 0, 0.007504674515235457, 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347 ],
[332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 49.456 ],
[66, 334, 0, 0.030625, 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445 ],
[330, 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 63.598 ],
[336, 66, 0, 0.015054362880886425, 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477 ],
[330, 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0, 1, 1, -360, 112.73700000000001 ],
[68, 70, 0, 0.016314058171745152, 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115 ],
[509, 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 52.848 ],
[324, 288, 0, 0.012627423822714683, 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468 ],
[338, 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0, 2, 1, -360, 27.915 ],
[339, 559, 0, 0.03560595041322315, 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927 ],
[339, 340, 0, 0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 131.762 ],
[559, 340, 0, 0.20983272727272728, 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686 ],
[341, 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 8.083 ],
[557, 342, 0, 0.006019834710743802, 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21 ],
[558, 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 32.217 ],
[502, 340, 0, 0.021737520661157025, 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756 ],
[72, 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 78.03399999999999 ],
[344, 345, 0, 0.0005762927054480609, 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993 ],
[346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 6.55 ],
[46, 47, 0, 0.0008975069252077563, 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184 ],
[346, 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.169 ],
[347, 328, 0, 0.029905454545454544, 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232 ],
[347, 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.862 ],
[571, 348, 0, 0.041548429752066116, 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842 ],
[347, 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.279 ],
[571, 570, 0, 0.17379041322314048, 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429 ],
[14, 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.772 ],
[350, 573, 0, 0.026277685950413226, 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745 ],
[15, 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0, 1, 1, -360, 76.222 ],
[352, 15, 0, 0.0015260560941828254, 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629 ],
[15, 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, 5134.0, 0, 1, 1, -360, 61.235 ],
[232, 227, 0, 5.5747922437673134e-05, 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161 ],
[565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 59.714 ],
[235, 567, 0, 0.02391404958677686, 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34 ],
[567, 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, 1981.0, 0, 1, 1, -360, 48.816 ],
[353, 519, 0, 0.007621818181818182, 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, 23.055999999999997 ],
[354, 353, 0, 0.0008436363636363636, 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552 ],
[355, 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.360999999999999 ],
[354, 356, 0, 0.01855404958677686, 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.063000000000002 ],
[357, 358, 0, 0.0034823407202216067, 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228 ],
[574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.195 ],
[235, 575, 0, 0.007459504132231404, 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565 ],
[167, 361, 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.864 ],
[528, 362, 0, 0.0011960330578512398, 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.6180000000000003 ],
[363, 344, 0, 0.0002662742382271468, 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538 ],
[259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.618 ],
[54, 56, 0, 0.007723337950138504, 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305 ],
[365, 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 21.649 ],
[231, 366, 0, 0.0013273891966759002, 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 7.667000000000001 ],
[30, 367, 0, 0.01126108033240997, 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522 ],
[61, 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 58.735 ],
[254, 368, 0, 0.0004297520661157025, 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3 ],
[254, 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.484 ],
[254, 370, 0, 0.0003669421487603306, 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11 ],
[99, 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 23.316999999999997 ],
[354, 519, 0, 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 20.457 ],
[571, 371, 0, 0.023726942148760328, 0.06275238397221199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887 ],
[207, 372, 0, 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.523 ],
[57, 373, 0, 0.0017725619834710745, 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681 ],
[209, 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.847 ],
[375, 376, 0, 0.0045364727608518006, 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652 ],
[376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 13.38 ],
[16, 49, 0, 0.002266101108033241, 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178 ],
[318, 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 20.599 ],
[378, 297, 0, 0.01753917355371901, 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.528000000000002 ],
[562, 379, 0, 0.01802314049586777, 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26 ],
[576, 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.735 ],
[576, 381, 0, 0.0034320661157024794, 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191 ],
[577, 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 90.818 ],
[244, 383, 0, 0.006845567867036011, 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655 ],
[244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 116.059 ],
[383, 306, 0, 0.0300685595567867, 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838 ],
[380, 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.958 ],
[252, 225, 0, 0.062094545454545444, 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, 46.958999999999996 ],
[220, 76, 0, 0.002772074099722992, 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023 ],
[542, 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.009 ],
[385, 384, 0, 0.053734876033057856, 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637 ],
[542, 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.201 ],
[386, 385, 0, 0.003668760330578512, 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.097999999999999 ],
[387, 578, 0, 0.015444628099173553, 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72 ],
[332, 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.07300000000001 ],
[382, 332, 0, 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 102.60700000000001 ],
[382, 388, 0, 0.00476159972299169, 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503 ],
[579, 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.905 ],
[577, 387, 0, 0.07597818181818182, 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917 ],
[144, 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.647 ],
[37, 49, 0, 0.008441481994459835, 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758 ],
[391, 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 41.042 ],
[392, 310, 0, 0.007035318559556785, 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, 20.317999999999998 ],
[260, 393, 0, 0.006341412742382271, 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314 ],
[394, 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0, 0, 1, 1, -360, 2.1919999999999997 ],
[395, 282, 0, 0.008762984764542936, 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615 ],
[395, 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.665 ],
[25, 396, 0, 0.008809037396121884, 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881 ],
[81, 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 43.44 ],
[278, 80, 0, 0.016286011080332407, 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068 ],
[81, 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 121.60799999999999 ],
[569, 570, 0, 0.03253950413223141, 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216 ],
[397, 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 9.513 ],
[542, 398, 0, 0.0005580165289256199, 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.6880000000000002 ],
[398, 385, 0, 0.021893553719008262, 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 33.114000000000004 ],
[399, 499, 0, 0.03266380165289256, 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, 24.701999999999998 ],
[83, 399, 0, 0.025700495867768593, 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436 ],
[498, 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.352999999999998 ],
[518, 239, 0, 0.04685289256198347, 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865 ],
[575, 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, 991.0, 0, 1, 1, -360, 9.168 ],
[401, 360, 0, 0.007957063711911357, 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98 ],
[580, 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.790999999999999 ],
[401, 402, 0, 0.0033434903047091418, 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656 ],
[403, 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.701999999999998 ],
[189, 360, 0, 0.028456024930747923, 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181 ],
[234, 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.24 ],
[235, 404, 0, 0.05107504132231405, 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251 ],
[235, 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.878 ],
[216, 259, 0, 0.0022115650969529088, 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 12.774000000000001 ],
[405, 259, 0, 0.0052832409972299165, 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516 ],
[405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 38.323 ],
[406, 230, 0, 8.098164819944598e-05, 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871 ],
[542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.674 ],
[23, 408, 0, 0.03224528925619835, 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771 ],
[577, 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.321999999999996 ],
[562, 564, 0, 0.06921520661157024, 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, 104.68799999999999 ],
[582, 507, 0, 0.006357685950413223, 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616 ],
[27, 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.544 ],
[501, 27, 0, 0.003811570247933884, 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53 ],
[27, 411, 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.031000000000001 ],
[411, 410, 0, 0.002054214876033058, 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.1069999999999998 ],
[403, 360, 0, 0.008191481994459833, 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, 23.656999999999996 ],
[412, 360, 0, 0.016761772853185596, 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408 ],
[326, 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 36.533 ],
[414, 413, 0, 0.008093223140495867, 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482 ],
[6, 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.725999999999999 ],
[554, 580, 0, 0.07435371900826447, 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46 ],
[262, 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, 0, 1, 1, -360, 113.53399999999999 ],
[499, 556, 0, 0.04185586776859504, 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, 63.306999999999995 ],
[224, 229, 0, 0.004135206611570248, 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509 ],
[583, 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.257 ],
[415, 307, 0, 0.015675554016620498, 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271 ],
[416, 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.193 ],
[284, 561, 0, 0.015221487603305786, 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045 ],
[543, 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 4.002 ],
[418, 506, 0, 0.0009395041322314049, 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842 ],
[220, 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.566999999999997 ],
[295, 419, 0, 0.0012023140495867769, 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637 ],
[295, 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.421 ],
[541, 62, 0, 0.05133355371900827, 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821 ],
[52, 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.802 ],
[60, 160, 0, 6.128808864265928e-05, 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177 ],
[535, 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.113 ],
[267, 282, 0, 0.0065652700831024926, 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921 ],
[52, 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 33.164 ],
[28, 27, 0, 0.015726942148760328, 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787 ],
[30, 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 52.725 ],
[422, 81, 0, 0.0004226685133887349, 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324 ],
[119, 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.673000000000002 ],
[423, 425, 0, 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 3.765 ],
[424, 425, 0, 0.005922957063711911, 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.211 ],
[426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 42.193999999999996 ],
[427, 428, 0, 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.8059999999999999 ],
[19, 428, 0, 0.023607603305785128, 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413 ],
[45, 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.755 ],
[44, 429, 0, 5.289256198347107e-05, 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08 ],
[505, 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.094 ],
[231, 431, 0, 0.011677285318559558, 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 67.44800000000001 ],
[190, 431, 0, 0.009600761772853185, 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.45399999999999 ],
[430, 431, 0, 0.0028100761772853187, 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.230999999999998 ],
[286, 433, 0, 0.01568694214876033, 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453 ],
[432, 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.304 ],
[506, 433, 0, 0.0065904132231404955, 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936 ],
[23, 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0, 2, 1, -360, 39.532 ],
[400, 434, 0, 0.008155371900826446, 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335 ],
[500, 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, 0, 2, 1, -360, 9.587 ],
[32, 436, 0, 0.0044813019390581715, 0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884 ],
[435, 436, 0, 0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 3.832 ],
[78, 436, 0, 0.00897680055401662, 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85 ],
[86, 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 84.868 ],
[437, 438, 0, 1.0387811634349031e-05, 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06 ],
[221, 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.17 ],
[207, 439, 0, 0.055703801652892564, 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, 42.126000000000005 ],
[516, 439, 0, 0.05448462809917355, 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, 41.20399999999999 ],
[513, 439, 0, 0.046726611570247926, 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.336999999999996 ],
[181, 441, 0, 0.040805289256198356, 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718 ],
[440, 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.2 ],
[504, 441, 0, 0.05916099173553719, 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, 89.48100000000001 ],
[135, 442, 0, 0.004956890581717451, 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631 ],
[109, 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.884 ],
[112, 442, 0, 0.0027304362880886425, 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.770999999999999 ],
[113, 443, 0, 0.0019885734072022164, 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 11.485999999999999 ],
[132, 443, 0, 0.006788434903047091, 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21 ],
[107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.129 ],
[444, 445, 0, 7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.455 ],
[112, 445, 0, 0.002816135734072022, 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266 ],
[109, 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.291 ],
[119, 447, 0, 0.005212690443213296, 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217 ],
[100, 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 58.563 ],
[446, 447, 0, 2.9518698060941832e-05, 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341 ],
[124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.376 ],
[125, 448, 0, 0.00615148891966759, 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531 ],
[131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.226 ],
[449, 450, 0, 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.64 ],
[173, 450, 0, 0.002862361495844876, 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533 ],
[184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.236 ],
[144, 451, 0, 0.007672727272727273, 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605 ],
[140, 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.574000000000002 ],
[514, 451, 0, 0.01149289256198347, 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383 ],
[537, 585, 0, 0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0, 1, 1, -360, 76.723 ],
[141, 585, 0, 0.007994710743801653, 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092 ],
[584, 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.14 ],
[522, 454, 0, 0.0035008264462809916, 0.0092588924438956, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295 ],
[144, 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.85 ],
[453, 454, 0, 0.001114710743801653, 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686 ],
[199, 456, 0, 0.013063140495867768, 0.0086372614214612, 248.0, 248.0, 248.0, 0, 1, 1, -360, 9.879 ],
[140, 456, 0, 0.005061818181818182, 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, 7.656000000000001 ],
[455, 456, 0, 0.0011365289256198346, 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719 ],
[537, 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0, 0, 1, 1, -360, 29.538 ],
[538, 457, 0, 0.027927272727272728, 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12 ],
[153, 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0, 1, 1, -360, 22.758000000000003 ],
[176, 457, 0, 0.004579173553719009, 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463 ],
[524, 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.532 ],
[458, 459, 0, 0.001993388429752066, 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015 ],
[134, 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.868 ],
[460, 461, 0, 6.611570247933885e-05, 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1 ],
[150, 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.128 ],
[149, 461, 0, 0.005586115702479339, 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449 ],
[521, 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0, 0, 1, 1, -360, 10.850999999999999 ],
[462, 463, 0, 0.007197355371900825, 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443 ],
[538, 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, 0, 1, 1, -360, 9.235 ],
[110, 464, 0, 0.0025753116343490306, 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875 ],
[90, 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.332 ],
[165, 464, 0, 0.002152527700831025, 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.433 ],
[458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.03 ],
[134, 465, 0, 0.011838677685950413, 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906 ],
[524, 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.494 ],
[466, 467, 0, 0.0023509349030470914, 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579 ],
[110, 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.635 ],
[165, 467, 0, 0.0022891274238227145, 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.222000000000001 ],
[468, 469, 0, 0.0005269421487603305, 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797 ],
[541, 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 33.866 ],
[490, 469, 0, 0.028243305785123966, 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718 ],
[263, 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0, 1, 1, -360, 28.125 ],
[470, 471, 0, 0.001570909090909091, 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188 ],
[534, 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 18.526 ],
[136, 472, 0, 0.0007079293628808865, 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.0889999999999995 ],
[110, 472, 0, 0.00019511772853185596, 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127 ],
[251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.243 ],
[226, 474, 0, 0.017639669421487602, 0.011663231841509601, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34 ],
[473, 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0, 0, 2, 1, -360, 5.244 ],
[257, 474, 0, 0.020264462809917356, 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65 ],
[6, 474, 0, 0.08066247933884299, 0.05333349367016, 248.0, 248.0, 248.0, 0, 1, 1, -360, 61.001000000000005 ],
[299, 475, 0, 0.013238227146814403, 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464 ],
[3, 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.614 ],
[210, 475, 0, 0.0001481994459833795, 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856 ],
[297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.267 ],
[296, 476, 0, 0.005596694214876033, 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465 ],
[295, 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.433 ],
[313, 478, 0, 0.008696849030470914, 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.233000000000004 ],
[477, 478, 0, 1.5235457063711912e-05, 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.08800000000000001 ],
[245, 478, 0, 0.005264542936288089, 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408 ],
[479, 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.986000000000004 ],
[565, 481, 0, 0.024842314049586776, 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574 ],
[480, 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.11699999999999999 ],
[415, 482, 0, 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1, -360, 31.831 ],
[56, 482, 0, 0.002630886426592798, 0.0236105947261788, 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598 ],
[409, 482, 0, 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 0, 1, 1, -360, 2.205 ],
[483, 484, 0, 9.037396121883656e-05, 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261 ],
[3, 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0, 0, 1, 1, -360, 28.944000000000003 ],
[301, 484, 0, 0.00966516620498615, 0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913 ],
[233, 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0, 1, 1, -360, 40.726 ],
[392, 485, 0, 0.00914819944598338, 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42 ],
[391, 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.24600000000000002 ],
[579, 488, 0, 0.004636473829194215, 0.11036180126571601, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 21.038 ],
[486, 488, 0, 0.00016969696969690082, 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77 ],
[487, 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.6609999999999999 ],
[270, 489, 0, 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.008 ],
[331, 489, 0, 0.003002943213296399, 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345 ],
[396, 489, 0, 0.01124792243767313, 0.40377286606072005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 64.968 ],
[519, 253, 0, 0.013353485337561985, 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, 40.394293146100004 ],
[382, 349, 0, 0.009091647380263157, 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 105.02671053600001 ],
[349, 351, 0, 0.0005858117819605263, 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 6.76729770521 ],
[459, 465, 0, 1.578788789911157e-05, 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.047758360894800005 ],
[549, 550, 0, 3.680432518409091e-05, 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.111333083682 ],
[550, 551, 0, 5.755645674710744e-05, 0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.17410828165999997 ],
[194, 195, 0, 1.7560672583171745e-05, 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.202860889681 ],
[247, 248, 0, 2.1755213937811637e-05, 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.25131623141 ],
[2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991 ],
[549, 551, 0, 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.28029073853799996 ],
[54, 365, 0, 2.573045189134349e-05, 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.297238180249 ],
[131, 265, 0, 2.7616389041343487e-05, 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.319024526206 ],
[91, 92, 0, 2.8945628197853184e-05, 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.33437989694200004 ],
[247, 249, 0, 3.098840072160664e-05, 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.357978005136 ],
[186, 191, 0, 3.1591661821191135e-05, 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.36494687735799997 ],
[129, 173, 0, 3.202671277479225e-05, 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.369972585975 ],
[96, 202, 0, 3.5971247867797784e-05, 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.415539855369 ],
[53, 320, 0, 3.784209581142659e-05, 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.437151890814 ],
[24, 396, 0, 4.144748602818559e-05, 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.47880135859800005 ],
[133, 156, 0, 4.431754564044322e-05, 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.511956287238 ],
[442, 452, 0, 4.483572190450138e-05, 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.517942259441 ],
[445, 452, 0, 4.490753296371191e-05, 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.518771820797 ],
[247, 250, 0, 4.594910768732687e-05, 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.530804092004 ],
[187, 195, 0, 4.755760376239612e-05, 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.549385438663 ],
[216, 236, 0, 5.03353075283241e-05, 0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.581473472567 ],
[244, 389, 0, 5.1633313019736845e-05, 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.596468032004 ],
[394, 406, 0, 5.6346419007686985e-05, 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.650913832377 ],
[442, 445, 0, 6.388070648310249e-05, 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.737949921293 ],
[442, 444, 0, 6.584378362735456e-05, 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.760627388463 ],
[198, 472, 0, 8.37554210498615e-05, 0.0120264578966664, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967 ],
[464, 467, 0, 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.977332411594 ],
[198, 251, 0, 8.83613182396122e-05, 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.0207499483 ],
[112, 143, 0, 9.049653833033241e-05, 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.04541601079 ],
[2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[128, 529, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[138, 531, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[259, 552, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[270, 556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[336, 571, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[383, 578, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ]
])
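# NOTE (added, hedged): the 13-column array closed above appears to follow the standard
# PYPOWER/MATPOWER branch layout (fbus, tbus, r, x, b, rateA, rateB, rateC, ratio, angle,
# status, angmin, angmax); this is an inference from the column count and value ranges,
# not something documented in this file.
# ppc["gen_control"] below is not a standard PYPOWER field. Each row has 7 columns; the
# first column appears to index a generator/bus, the second is a type flag (1-3), the
# fourth column is consistently 50x the third, and the last three columns are
# (2.22, 61.69, 0.004502) for type-3 rows and zero otherwise. The physical meaning of
# these parameters is not documented here.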
ppc["gen_control"] = array([
[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0],
[589, 1, 0.010042676909098597, 0.5021338454549299, 0, 0, 0],
[590, 1, 0.012095775674984046, 0.6047887837492023, 0, 0, 0],
[593, 1, 0.0017666198683200384, 0.08833099341600192, 0, 0, 0],
[594, 1, 0.006047887837492023, 0.30239439187460115, 0, 0, 0],
[595, 1, 1.50560576164933, 75.2802880824665, 0, 0, 0],
[597, 1, 0.030239439187460113, 1.5119719593730057, 0, 0, 0],
[598, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[599, 1, 0.0029602819415092537, 0.1480140970754627, 0, 0, 0],
[601, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[602, 1, 0.007830423200121252, 0.39152116000606263, 0, 0, 0],
[603, 1, 1.0997606567649967, 54.98803283824984, 0, 0, 0],
[607, 1, 0.5729577951308232, 28.64788975654116, 0, 0, 0],
[608, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[609, 1, 0.0057932399285449895, 0.2896619964272495, 0, 0, 0],
[610, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[612, 1, 0.00954929658551372, 0.477464829275686, 0, 0, 0],
[613, 1, 0.027056340325622208, 1.3528170162811104, 0, 0, 0],
[614, 1, 0.00954929658551372, 0.477464829275686, 0, 0, 0],
[616, 1, 0.0046154933496649645, 0.23077466748324824, 0, 0, 0],
[617, 1, 0.04360845440717932, 2.1804227203589663, 0, 0, 0],
[618, 1, 0.010631550198538607, 0.5315775099269304, 0, 0, 0],
[619, 1, 0.037560566569687294, 1.8780283284843649, 0, 0, 0],
[621, 1, 0.24350706293059987, 12.175353146529993, 0, 0, 0],
[623, 1, 0.2419155134996809, 12.095775674984045, 0, 0, 0],
[624, 1, 0.004297183463481174, 0.21485917317405873, 0, 0, 0],
[628, 1, 0.14292113889652203, 7.1460569448261015, 0, 0, 0],
[629, 1, 0.023968734429639437, 1.198436721481972, 0, 0, 0],
[631, 1, 0.025401128917466494, 1.2700564458733248, 0, 0, 0],
[632, 1, 0.01435577586688896, 0.717788793344448, 0, 0, 0],
[637, 1, 0.017093240888069558, 0.854662044403478, 0, 0, 0],
[638, 1, 0.02048324117592693, 1.0241620587963465, 0, 0, 0],
[639, 1, 0.005029296201703893, 0.25146481008519467, 0, 0, 0],
[640, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[641, 1, 0.0040107045659157625, 0.20053522829578813, 0, 0, 0],
[642, 1, 0.00919915571071155, 0.4599577855355775, 0, 0, 0],
[643, 1, 0.27279157245950864, 13.639578622975431, 0, 0, 0],
[646, 1, 0.03278591827693044, 1.6392959138465222, 0, 0, 0],
[647, 1, 0.00445633840657307, 0.2228169203286535, 0, 0, 0],
[650, 1, 0.4216014442504307, 21.080072212521536, 0, 0, 0],
[652, 1, 0.00746436683100989, 0.37321834155049455, 0, 0, 0],
[655, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[657, 1, 0.012095775674984046, 0.6047887837492023, 0, 0, 0],
[658, 1, 0.030239439187460113, 1.5119719593730057, 0, 0, 0],
[661, 1, 0.010408733278209955, 0.5204366639104978, 0, 0, 0],
[662, 1, 0.002928450952890874, 0.1464225476445437, 0, 0, 0],
[663, 1, 0.00238732414637843, 0.1193662073189215, 0, 0, 0],
[666, 1, 0.00919915571071155, 0.4599577855355775, 0, 0, 0],
[668, 1, 0.24382537281678363, 12.191268640839182, 0, 0, 0],
[670, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[672, 1, 0.010536057232683471, 0.5268028616341736, 0, 0, 0],
[676, 1, 0.11777465788800255, 5.888732894400127, 0, 0, 0],
[677, 1, 0.004265352474862795, 0.21326762374313976, 0, 0, 0],
[678, 1, 0.3237211542489151, 16.186057712445756, 0, 0, 0],
[679, 1, 0.2212253708977345, 11.061268544886726, 0, 0, 0],
[681, 1, 0.0063821132179850025, 0.31910566089925013, 0, 0, 0],
[683, 1, 0.008753521870054244, 0.4376760935027122, 0, 0, 0],
[687, 1, 0.42303383873825773, 21.151691936912886, 0, 0, 0],
[689, 1, 0.09867606471697511, 4.933803235848756, 0, 0, 0],
[691, 1, 0.008276057040778557, 0.4138028520389279, 0, 0, 0],
[693, 1, 0.06175211791965539, 3.0876058959827692, 0, 0, 0],
[694, 1, 0.005220282133414166, 0.2610141066707083, 0, 0, 0],
[695, 1, 0.004679155326901723, 0.23395776634508614, 0, 0, 0],
[696, 1, 0.22950142793851305, 11.475071396925653, 0, 0, 0],
[697, 1, 0.0036923946797319715, 0.1846197339865986, 0, 0, 0],
[698, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[699, 1, 0.033295214094824506, 1.6647607047412254, 0, 0, 0],
[700, 1, 0.008594366926962348, 0.42971834634811745, 0, 0, 0],
[701, 1, 0.015024226627874922, 0.7512113313937461, 0, 0, 0],
[702, 1, 0.023363945645890238, 1.168197282294512, 0, 0, 0],
[704, 1, 0.16170142218136566, 8.085071109068283, 0, 0, 0],
[705, 1, 0.005411268065124442, 0.27056340325622213, 0, 0, 0],
[707, 1, 0.010822536130248884, 0.5411268065124443, 0, 0, 0],
[708, 1, 0.0024828171122335675, 0.12414085561167837, 0, 0, 0],
[711, 1, 0.056054370956965534, 2.802718547848277, 0, 0, 0],
[713, 1, 0.004265352474862795, 0.21326762374313976, 0, 0, 0],
[714, 1, 0.00477464829275686, 0.238732414637843, 0, 0, 0],
[716, 1, 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0],
[717, 1, 0.0017507043740108488, 0.08753521870054244, 0, 0, 0],
[719, 1, 0.623250757147862, 31.162537857393104, 0, 0, 0],
[721, 1, 0.0012732395447351628, 0.06366197723675814, 0, 0, 0],
[722, 1, 0.006589014644004467, 0.3294507322002233, 0, 0, 0],
[723, 1, 0.006270704757820675, 0.31353523789103377, 0, 0, 0],
[724, 1, 0.0019257748114119334, 0.09628874057059668, 0, 0, 0],
[725, 1, 0.25464790894703254, 12.732395447351628, 0, 0, 0],
[726, 1, 0.040107045659157625, 2.0053522829578814, 0, 0, 0],
[727, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[728, 1, 0.16233804195373325, 8.116902097686662, 0, 0, 0],
[730, 1, 0.10077690996578814, 5.038845498289407, 0, 0, 0],
[731, 1, 0.2848873481344926, 14.244367406724633, 0, 0, 0],
[732, 1, 0.004647324338283344, 0.2323662169141672, 0, 0, 0],
[733, 1, 0.12624170086049138, 6.312085043024569, 0, 0, 0],
[735, 1, 0.013496339174192726, 0.6748169587096363, 0, 0, 0],
[736, 1, 0.010185916357881302, 0.5092958178940651, 0, 0, 0],
[737, 1, 0.00891267681314614, 0.445633840657307, 0, 0, 0],
[738, 1, 0.04408591923645501, 2.2042959618227504, 0, 0, 0],
[739, 1, 0.01906676218240906, 0.9533381091204531, 0, 0, 0],
[741, 1, 0.0340591578216656, 1.7029578910832803, 0, 0, 0],
[742, 1, 0.0028647889756541157, 0.14323944878270578, 0, 0, 0],
[743, 1, 0.44881693951914486, 22.440846975957243, 0, 0, 0],
[745, 1, 0.013369015219719208, 0.6684507609859605, 0, 0, 0],
[746, 1, 0.03183098861837907, 1.5915494309189535, 0, 0, 0],
[747, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[748, 1, 0.03501408748021698, 1.7507043740108488, 0, 0, 0],
[749, 1, 0.0025464790894703256, 0.12732395447351627, 0, 0, 0],
[750, 1, 0.028902537665488188, 1.4451268832744095, 0, 0, 0],
[758, 1, 0.0058887328944001276, 0.2944366447200064, 0, 0, 0],
[760, 1, 0.2527380496299298, 12.636902481496492, 0, 0, 0],
[761, 1, 0.004997465213085514, 0.2498732606542757, 0, 0, 0],
[762, 1, 0.3517324242330887, 17.586621211654435, 0, 0, 0],
[763, 1, 0.006461690689530951, 0.32308453447654756, 0, 0, 0],
[765, 1, 0.018780283284843647, 0.9390141642421824, 0, 0, 0],
[767, 1, 0.0035650707252584553, 0.17825353626292276, 0, 0, 0],
[769, 1, 0.013782818071758136, 0.6891409035879068, 0, 0, 0],
[771, 1, 0.21963382146681557, 10.981691073340778, 0, 0, 0],
[772, 1, 0.002992112930127632, 0.1496056465063816, 0, 0, 0],
[774, 1, 0.010663381187156987, 0.5331690593578494, 0, 0, 0],
[775, 1, 0.04074366543152521, 2.0371832715762603, 0, 0, 0],
[776, 1, 0.01782535362629228, 0.891267681314614, 0, 0, 0],
[777, 1, 0.012573240504259732, 0.6286620252129866, 0, 0, 0],
[778, 1, 0.004679155326901723, 0.23395776634508614, 0, 0, 0],
[779, 1, 0.010886198107485642, 0.5443099053742821, 0, 0, 0],
[781, 1, 0.4169859509007658, 20.84929754503829, 0, 0, 0],
[784, 1, 0.4058451048843331, 20.292255244216655, 0, 0, 0],
[785, 1, 0.00047746482927568597, 0.0238732414637843, 0, 0, 0],
[786, 1, 0.0621977517603127, 3.109887588015635, 0, 0, 0],
[787, 1, 0.24764509145098912, 12.382254572549456, 0, 0, 0],
[788, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[789, 1, 0.0123185925953127, 0.615929629765635, 0, 0, 0],
[791, 1, 0.0031830988618379067, 0.15915494309189535, 0, 0, 0],
[792, 1, 0.009979014931861837, 0.49895074659309185, 0, 0, 0],
[793, 1, 0.0031194368846011486, 0.15597184423005744, 0, 0, 0],
[794, 1, 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0],
[795, 1, 0.004329014452099553, 0.2164507226049777, 0, 0, 0],
[796, 1, 0.027088171314240586, 1.3544085657120293, 0, 0, 0],
[798, 1, 0.10179550160157626, 5.089775080078813, 0, 0, 0],
[800, 1, 0.0058091554228541795, 0.290457771142709, 0, 0, 0],
[801, 1, 0.007957747154594767, 0.3978873577297384, 0, 0, 0],
[802, 1, 0.07957747154594767, 3.9788735772973833, 0, 0, 0],
[805, 1, 0.44881693951914486, 22.440846975957243, 0, 0, 0],
[806, 1, 0.005697746962689853, 0.2848873481344927, 0, 0, 0],
[808, 1, 0.034616200122487235, 1.7308100061243619, 0, 0, 0],
[809, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[811, 1, 0.0040107045659157625, 0.20053522829578813, 0, 0, 0],
[814, 1, 0.014164789935178685, 0.7082394967589343, 0, 0, 0],
[816, 1, 0.012748310941660816, 0.6374155470830408, 0, 0, 0],
[817, 1, 0.017188733853924696, 0.8594366926962349, 0, 0, 0],
[818, 1, 0.24096058384112953, 12.048029192056477, 0, 0, 0],
[821, 1, 0.013130282805081364, 0.6565141402540683, 0, 0, 0],
[822, 1, 0.04265352474862795, 2.1326762374313977, 0, 0, 0],
[825, 1, 0.013591832140047864, 0.6795916070023932, 0, 0, 0],
[826, 1, 0.018461973398659858, 0.9230986699329929, 0, 0, 0],
[830, 1, 0.02832957987035737, 1.4164789935178685, 0, 0, 0],
[833, 1, 0.0059205638830185075, 0.2960281941509254, 0, 0, 0],
[834, 1, 0.007416620348082323, 0.37083101740411617, 0, 0, 0],
[835, 1, 0.010138169874953733, 0.5069084937476867, 0, 0, 0],
[836, 1, 0.008116902097686661, 0.4058451048843331, 0, 0, 0],
[837, 1, 0.15024226627874918, 7.512113313937459, 0, 0, 0],
[839, 1, 0.011666057328635928, 0.5833028664317964, 0, 0, 0],
[840, 1, 0.4427690516816528, 22.138452584082643, 0, 0, 0],
[841, 1, 0.0037083101740411615, 0.18541550870205808, 0, 0, 0],
[843, 1, 0.10599719209920229, 5.2998596049601145, 0, 0, 0],
[844, 1, 0.012732395447351627, 0.6366197723675814, 0, 0, 0],
[845, 1, 0.10122254380644544, 5.061127190322272, 0, 0, 0],
[848, 1, 0.013369015219719208, 0.6684507609859605, 0, 0, 0],
[849, 1, 0.24796340133717296, 12.398170066858649, 0, 0, 0],
[850, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[851, 1, 0.01265281797580568, 0.632640898790284, 0, 0, 0],
[852, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[853, 1, 0.0036923946797319715, 0.1846197339865986, 0, 0, 0],
[855, 1, 0.21899720169444797, 10.949860084722399, 0, 0, 0],
[856, 1, 0.011459155902616463, 0.5729577951308231, 0, 0, 0],
[857, 1, 0.4462704604296745, 22.313523021483725, 0, 0, 0],
[858, 1, 0.01808000153523931, 0.9040000767619655, 0, 0, 0],
[859, 1, 0.027056340325622208, 1.3528170162811104, 0, 0, 0],
[860, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[862, 1, 0.23077466748324824, 11.538733374162412, 0, 0, 0],
[863, 1, 0.0001909859317102744, 0.00954929658551372, 0, 0, 0],
[864, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[865, 1, 0.0035014087480216977, 0.17507043740108488, 0, 0, 0],
[866, 1, 0.08290062675770644, 4.145031337885323, 0, 0, 0],
[867, 1, 0.24478030247533505, 12.239015123766753, 0, 0, 0],
[869, 1, 0.4329014452099553, 21.645072260497766, 0, 0, 0],
[870, 1, 0.018589297353133374, 0.9294648676566688, 0, 0, 0],
[872, 1, 0.00716197243913529, 0.3580986219567645, 0, 0, 0],
[873, 1, 0.038833806114422456, 1.941690305721123, 0, 0, 0],
[874, 1, 0.006589014644004467, 0.3294507322002233, 0, 0, 0],
[875, 1, 0.007766761222884492, 0.38833806114422464, 0, 0, 0],
[876, 1, 0.018589297353133374, 0.9294648676566688, 0, 0, 0],
[877, 1, 0.007894085177358009, 0.39470425886790045, 0, 0, 0],
[881, 1, 0.3187236890358296, 15.93618445179148, 0, 0, 0],
[882, 1, 0.005538592019597957, 0.2769296009798979, 0, 0, 0],
[883, 1, 0.005729577951308231, 0.28647889756541156, 0, 0, 0],
[885, 1, 0.15597184423005742, 7.798592211502871, 0, 0, 0],
[886, 1, 0.8186930272647096, 40.93465136323548, 0, 0, 0],
[888, 1, 0.011172677005051054, 0.5586338502525526, 0, 0, 0],
[889, 1, 0.0030239439187460114, 0.15119719593730058, 0, 0, 0],
[890, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[895, 1, 0.0030239439187460114, 0.15119719593730058, 0, 0, 0],
[896, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[897, 1, 0.01782535362629228, 0.891267681314614, 0, 0, 0],
[898, 1, 0.013464508185574344, 0.6732254092787172, 0, 0, 0],
[899, 1, 0.002705634032562221, 0.13528170162811107, 0, 0, 0],
[900, 1, 0.03584169318429482, 1.7920846592147412, 0, 0, 0],
[902, 1, 0.006207042780583919, 0.31035213902919595, 0, 0, 0],
[903, 1, 0.0031990143561470966, 0.15995071780735484, 0, 0, 0],
[905, 1, 0.021851973686517232, 1.0925986843258617, 0, 0, 0],
[906, 1, 0.010504226244065093, 0.5252113122032547, 0, 0, 0],
[907, 1, 0.02142225534016911, 1.0711127670084555, 0, 0, 0],
[909, 1, 0.005856901905781748, 0.2928450952890874, 0, 0, 0],
[913, 1, 0.02355493157760051, 1.1777465788800257, 0, 0, 0],
[915, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[917, 1, 0.005411268065124442, 0.27056340325622213, 0, 0, 0],
[918, 1, 0.012254930618075942, 0.612746530903797, 0, 0, 0],
[920, 1, 0.0020371832715762603, 0.10185916357881303, 0, 0, 0],
[921, 1, 0.019735212943395024, 0.9867606471697512, 0, 0, 0],
[922, 1, 0.05220282133414166, 2.6101410667070835, 0, 0, 0],
[923, 1, 0.023236621691416718, 1.161831084570836, 0, 0, 0],
[924, 1, 0.0037242256683503506, 0.18621128341751753, 0, 0, 0],
[925, 1, 0.008276057040778557, 0.4138028520389279, 0, 0, 0],
[928, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[931, 1, 0.03455253814525047, 1.7276269072625237, 0, 0, 0],
[934, 1, 0.09421972631040204, 4.710986315520103, 0, 0, 0],
[935, 1, 0.007352958370845565, 0.36764791854227824, 0, 0, 0],
[936, 1, 0.016615776058793875, 0.8307888029396938, 0, 0, 0],
[937, 1, 0.00477464829275686, 0.238732414637843, 0, 0, 0],
[939, 1, 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0],
[940, 1, 0.009421972631040205, 0.47109863155201026, 0, 0, 0],
[942, 1, 0.016520283092938737, 0.8260141546469368, 0, 0, 0],
[944, 1, 0.004042535554534142, 0.2021267777267071, 0, 0, 0],
[945, 1, 0.011140846016432674, 0.5570423008216338, 0, 0, 0],
[948, 1, 0.025146481008519465, 1.2573240504259733, 0, 0, 0],
[950, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[952, 1, 0.005045211696013082, 0.2522605848006541, 0, 0, 0],
[956, 1, 0.020690142601946394, 1.0345071300973196, 0, 0, 0],
[957, 1, 0.0019098593171027439, 0.0954929658551372, 0, 0, 0],
[958, 1, 0.010615634704229418, 0.530781735211471, 0, 0, 0],
[959, 1, 0.007241549910681238, 0.3620774955340619, 0, 0, 0],
[960, 1, 0.004217605991935227, 0.21088029959676136, 0, 0, 0],
[963, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[965, 1, 0.11204507993669433, 5.602253996834716, 0, 0, 0],
[966, 1, 0.021008452488130186, 1.0504226244065094, 0, 0, 0],
[967, 1, 0.01193662073189215, 0.5968310365946076, 0, 0, 0],
[968, 1, 0.017188733853924696, 0.8594366926962349, 0, 0, 0],
[969, 1, 0.018111832523857688, 0.9055916261928845, 0, 0, 0],
[971, 1, 0.0031830988618379067, 0.15915494309189535, 0, 0, 0],
[972, 1, 0.12414085561167836, 6.207042780583918, 0, 0, 0],
[973, 1, 0.4287634166895661, 21.438170834478306, 0, 0, 0],
[975, 1, 0.01671126902464901, 0.8355634512324506, 0, 0, 0],
[976, 1, 0.008562535938343968, 0.4281267969171984, 0, 0, 0],
[977, 1, 0.1031324031235482, 5.15662015617741, 0, 0, 0],
[978, 1, 0.0007321127382227185, 0.03660563691113593, 0, 0, 0],
[981, 1, 0.03787887645587108, 1.8939438227935543, 0, 0, 0],
[982, 1, 0.0015756339366097638, 0.07878169683048819, 0, 0, 0],
[983, 1, 0.01400563499208679, 0.7002817496043395, 0, 0, 0],
[984, 1, 0.14801409707546268, 7.400704853773133, 0, 0, 0],
[985, 1, 0.0035014087480216977, 0.17507043740108488, 0, 0, 0],
[986, 1, 0.0017825353626292277, 0.08912676813146138, 0, 0, 0],
[987, 1, 0.02618098813861678, 1.3090494069308392, 0, 0, 0],
[988, 1, 0.0008116902097686662, 0.04058451048843331, 0, 0, 0],
[990, 1, 0.0954929658551372, 4.7746482927568605, 0, 0, 0],
[993, 1, 0.06238873769202297, 3.119436884601149, 0, 0, 0],
[994, 1, 0.010504226244065093, 0.5252113122032547, 0, 0, 0],
[995, 1, 0.0006684507609859605, 0.033422538049298026, 0, 0, 0],
[996, 1, 0.003660563691113593, 0.18302818455567965, 0, 0, 0],
[997, 1, 0.005984225860255264, 0.2992112930127632, 0, 0, 0],
[998, 1, 0.13464508185574348, 6.732254092787174, 0, 0, 0],
[999, 1, 0.004965634224467135, 0.24828171122335674, 0, 0, 0],
[1000, 1, 0.015597184423005743, 0.7798592211502873, 0, 0, 0],
[1002, 1, 0.0031512678732195276, 0.15756339366097638, 0, 0, 0],
[1003, 1, 0.2864788975654116, 14.32394487827058, 0, 0, 0],
[1007, 1, 0.007416620348082323, 0.37083101740411617, 0, 0, 0],
[1008, 1, 0.015597184423005743, 0.7798592211502873, 0, 0, 0],
[1010, 1, 0.238732414637843, 11.93662073189215, 0, 0, 0],
[1011, 1, 0.005952394871636886, 0.2976197435818443, 0, 0, 0],
[1012, 1, 0.9024085273310466, 45.12042636655233, 0, 0, 0],
[1018, 1, 0.05599070897972878, 2.7995354489864392, 0, 0, 0],
[1019, 1, 0.03819718634205488, 1.909859317102744, 0, 0, 0],
[1023, 1, 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0],
[1025, 1, 0.03616000307047862, 1.808000153523931, 0, 0, 0],
[1026, 1, 0.20868396138209316, 10.434198069104658, 0, 0, 0],
[1027, 3, 0.003074873500535418, 0.15374367502677092, 2.22, 61.69, 0.004502],
[1028, 2, 0.025464790894703257, 1.273239544735163, 0, 0, 0],
[1029, 2, 0.003819718634205488, 0.19098593171027442, 0, 0, 0],
[1030, 2, 0.06480789282701978, 3.2403946413509894, 0, 0, 0],
[1031, 2, 0.0921316134570364, 4.60658067285182, 0, 0, 0],
[1032, 2, 0.009772775025341927, 0.4886387512670964, 0, 0, 0],
[1033, 2, 0.0026465383981801338, 0.1323269199090067, 0, 0, 0],
[1034, 2, 0.005364335122251813, 0.26821675611259066, 0, 0, 0],
[1035, 3, 0.00317587127473044, 0.158793563736522, 2.22, 61.69, 0.004502],
[1036, 2, 0.003979401770097368, 0.19897008850486841, 0, 0, 0],
[1037, 2, 0.0060277734620055035, 0.3013886731002752, 0, 0, 0],
[1038, 2, 0.005462103769994554, 0.2731051884997277, 0, 0, 0],
[1039, 2, 0.005913400500746229, 0.2956700250373115, 0, 0, 0],
[1041, 2, 0.008736705901893021, 0.4368352950946511, 0, 0, 0],
[1042, 2, 0.002236240074990482, 0.1118120037495241, 0, 0, 0],
[1044, 3, 0.0023022419250361527, 0.11511209625180763, 2.22, 61.69, 0.004502],
[1046, 2, 0.00679827557108513, 0.33991377855425653, 0, 0, 0],
[1047, 3, 0.0008294889076348922, 0.04147444538174461, 2.22, 61.69, 0.004502],
[1048, 2, 0.004561818873896339, 0.22809094369481697, 0, 0, 0],
[1049, 2, 0.01870104799381521, 0.9350523996907605, 0, 0, 0],
[1050, 2, 0.001674586221361763, 0.08372931106808816, 0, 0, 0],
[1051, 2, 0.008610220286935111, 0.4305110143467556, 0, 0, 0],
[1052, 3, 0.001315809692296204, 0.06579048461481019, 2.22, 61.69, 0.004502],
[1053, 3, 0.001042024786453249, 0.05210123932266245, 2.22, 61.69, 0.004502],
[1054, 2, 0.017434200209443074, 0.8717100104721537, 0, 0, 0],
[1055, 3, 0.00011871244168422753, 0.005935622084211377, 2.22, 61.69, 0.004502],
[1056, 2, 0.022965347424951363, 1.1482673712475682, 0, 0, 0],
[1057, 2, 0.02718238967557453, 1.3591194837787268, 0, 0, 0],
[1058, 2, 0.04461485898591968, 2.2307429492959843, 0, 0, 0],
[1059, 2, 0.017013874249418158, 0.8506937124709079, 0, 0, 0],
[1060, 3, 0.0003260901937161927, 0.016304509685809633, 2.22, 61.69, 0.004502],
[1061, 2, 0.005436368167959151, 0.27181840839795757, 0, 0, 0],
[1062, 3, 0.00011488919588970951, 0.005744459794485476, 2.22, 61.69, 0.004502],
[1063, 3, 0.0003425274483539234, 0.01712637241769617, 2.22, 61.69, 0.004502],
[1064, 2, 0.008174693602404245, 0.4087346801202122, 0, 0, 0],
[1065, 2, 0.014487023099809197, 0.7243511549904599, 0, 0, 0],
[1066, 2, 0.005099925117482008, 0.2549962558741004, 0, 0, 0],
[1067, 3, 0.0008283924405670749, 0.04141962202835374, 2.22, 61.69, 0.004502],
[1072, 2, 0.007168748144119091, 0.3584374072059546, 0, 0, 0],
[1073, 2, 0.004954025493475761, 0.24770127467378808, 0, 0, 0],
[1074, 2, 0.009778033156939965, 0.48890165784699824, 0, 0, 0],
[1077, 3, 0.0007664457877913659, 0.0383222893895683, 2.22, 61.69, 0.004502],
[1079, 2, 0.004604543003215469, 0.23022715016077344, 0, 0, 0],
[1080, 2, 0.003349647097644276, 0.1674823548822138, 0, 0, 0],
[1081, 2, 0.01601172754276052, 0.800586377138026, 0, 0, 0],
[1082, 2, 0.016964047639621335, 0.8482023819810669, 0, 0, 0],
[1083, 2, 0.02162958181089171, 1.0814790905445855, 0, 0, 0],
[1084, 2, 0.019857016058101837, 0.992850802905092, 0, 0, 0],
[1085, 2, 0.005758465971105609, 0.2879232985552805, 0, 0, 0],
[1086, 2, 0.011188498437811297, 0.5594249218905649, 0, 0, 0],
[1087, 2, 0.00397539235779677, 0.19876961788983852, 0, 0, 0],
[1088, 3, 0.0013881136481632718, 0.06940568240816358, 2.22, 61.69, 0.004502],
[1089, 2, 0.01263503907246808, 0.631751953623404, 0, 0, 0],
[1090, 2, 0.005674885746854652, 0.2837442873427326, 0, 0, 0],
[1091, 3, 0.002915330196419503, 0.14576650982097517, 2.22, 61.69, 0.004502],
[1092, 2, 0.003437876146252996, 0.1718938073126498, 0, 0, 0],
[1093, 2, 0.009906140914748767, 0.49530704573743833, 0, 0, 0],
[1094, 3, 0.00023930778294026586, 0.011965389147013294, 2.22, 61.69, 0.004502],
[1095, 3, 1.3047613994501091e-05, 0.0006523806997250545, 2.22, 61.69, 0.004502],
[1096, 2, 0.005379826679377905, 0.2689913339688953, 0, 0, 0],
[1097, 3, 0.0002929164939619051, 0.014645824698095257, 2.22, 61.69, 0.004502],
[1098, 2, 0.004521623727146264, 0.22608118635731317, 0, 0, 0],
[1099, 2, 0.018521637260932335, 0.9260818630466169, 0, 0, 0],
[1101, 2, 0.005343192104787693, 0.2671596052393847, 0, 0, 0],
[1102, 2, 0.02234407998394998, 1.1172039991974991, 0, 0, 0],
[1103, 2, 0.01562148424141561, 0.7810742120707805, 0, 0, 0],
[1104, 3, 1.3172819714966009e-05, 0.0006586409857483004, 2.22, 61.69, 0.004502],
[1105, 3, 0.0001386935566767763, 0.006934677833838815, 2.22, 61.69, 0.004502],
[1106, 3, 0.00014577275883068604, 0.0072886379415343025, 2.22, 61.69, 0.004502],
[1107, 2, 0.003391514823097816, 0.16957574115489077, 0, 0, 0],
[1108, 2, 0.015741564572850766, 0.7870782286425384, 0, 0, 0],
[1109, 3, 4.9542410867097304e-05, 0.002477120543354865, 2.22, 61.69, 0.004502],
[1110, 3, 0.00010533237807450261, 0.00526661890372513, 2.22, 61.69, 0.004502],
[1111, 2, 0.003682113867455725, 0.18410569337278626, 0, 0, 0],
[1112, 2, 0.004426690383932842, 0.2213345191966421, 0, 0, 0],
[1113, 3, 0.00022513170529279912, 0.011256585264639957, 2.22, 61.69, 0.004502],
[1114, 3, 0.0005550707533170727, 0.027753537665853634, 2.22, 61.69, 0.004502],
[1115, 2, 0.0032197222090973076, 0.16098611045486538, 0, 0, 0],
[1116, 3, 0.002075453185310181, 0.10377265926550905, 2.22, 61.69, 0.004502],
[1117, 2, 0.005780032679669937, 0.2890016339834969, 0, 0, 0],
[1118, 3, 0.0005199122415272909, 0.025995612076364544, 2.22, 61.69, 0.004502],
[1119, 3, 0.0027536366373517632, 0.13768183186758817, 2.22, 61.69, 0.004502],
[1120, 3, 0.0001538074296570127, 0.007690371482850636, 2.22, 61.69, 0.004502],
[1121, 3, 3.4414977793908876e-05, 0.0017207488896954439, 2.22, 61.69, 0.004502],
[1122, 3, 9.313004041299959e-05, 0.00465650202064998, 2.22, 61.69, 0.004502],
[1123, 3, 8.936930538294867e-05, 0.004468465269147434, 2.22, 61.69, 0.004502],
[1124, 3, 8.201464578534214e-05, 0.004100732289267108, 2.22, 61.69, 0.004502],
[1125, 3, 0.001588801827253252, 0.07944009136266261, 2.22, 61.69, 0.004502],
[1126, 3, 0.0018426380603240493, 0.09213190301620247, 2.22, 61.69, 0.004502],
[1127, 2, 0.006703391093283916, 0.3351695546641958, 0, 0, 0],
[1128, 3, 0.0001948941120002845, 0.009744705600014225, 2.22, 61.69, 0.004502],
[1129, 3, 0.0003016780123772693, 0.015083900618863466, 2.22, 61.69, 0.004502],
[1130, 3, 6.530151955301432e-05, 0.003265075977650716, 2.22, 61.69, 0.004502],
[1131, 3, 0.00018443373362804407, 0.009221686681402204, 2.22, 61.69, 0.004502],
[1132, 3, 2.2886271300209156e-05, 0.0011443135650104578, 2.22, 61.69, 0.004502],
[1133, 3, 4.5810964480308454e-05, 0.002290548224015423, 2.22, 61.69, 0.004502],
[1134, 3, 3.236913111220881e-05, 0.0016184565556104404, 2.22, 61.69, 0.004502],
[1135, 3, 0.0004528723014143959, 0.022643615070719797, 2.22, 61.69, 0.004502],
[1136, 3, 2.5636662405410735e-05, 0.0012818331202705368, 2.22, 61.69, 0.004502],
[1137, 3, 0.00016790582753671083, 0.008395291376835541, 2.22, 61.69, 0.004502],
[1138, 3, 7.98498118498449e-05, 0.003992490592492246, 2.22, 61.69, 0.004502],
[1139, 3, 0.0012619566606414858, 0.0630978330320743, 2.22, 61.69, 0.004502],
[1140, 3, 0.0013901725262468376, 0.06950862631234189, 2.22, 61.69, 0.004502],
[1141, 2, 0.0076053500901520025, 0.38026750450760016, 0, 0, 0],
[1142, 3, 7.73959943559724e-05, 0.00386979971779862, 2.22, 61.69, 0.004502],
[1143, 3, 0.001326344775515579, 0.06631723877577896, 2.22, 61.69, 0.004502],
[1144, 2, 0.00334399697192306, 0.16719984859615303, 0, 0, 0],
[1145, 2, 0.011197481443497569, 0.5598740721748785, 0, 0, 0],
[1146, 3, 5.4833151376821656e-05, 0.002741657568841083, 2.22, 61.69, 0.004502],
[1147, 3, 0.002909588342312674, 0.14547941711563372, 2.22, 61.69, 0.004502],
[1148, 3, 0.0011233492673683868, 0.05616746336841934, 2.22, 61.69, 0.004502],
[1149, 3, 0.0005447417794635118, 0.02723708897317559, 2.22, 61.69, 0.004502],
[1150, 3, 0.0002306193019977063, 0.011530965099885314, 2.22, 61.69, 0.004502],
[1151, 3, 0.0008299047575760064, 0.04149523787880033, 2.22, 61.69, 0.004502],
[1152, 3, 7.417749437366368e-06, 0.0003708874718683184, 2.22, 61.69, 0.004502],
[1153, 3, 4.283385139953296e-06, 0.0002141692569976648, 2.22, 61.69, 0.004502],
[1154, 3, 1.0001936259040478e-05, 0.0005000968129520238, 2.22, 61.69, 0.004502],
[1155, 3, 3.879887736397654e-05, 0.001939943868198827, 2.22, 61.69, 0.004502],
[1156, 3, 0.0010200134924871187, 0.05100067462435595, 2.22, 61.69, 0.004502],
[1157, 3, 0.00027719360593007886, 0.013859680296503944, 2.22, 61.69, 0.004502],
[1158, 3, 6.640198284893194e-05, 0.003320099142446597, 2.22, 61.69, 0.004502],
[1159, 3, 0.0008593149079194712, 0.04296574539597356, 2.22, 61.69, 0.004502],
[1160, 2, 0.015175599618213626, 0.7587799809106813, 0, 0, 0],
[1161, 3, 0.0008335971783564253, 0.04167985891782127, 2.22, 61.69, 0.004502],
[1162, 2, 0.02334015009089389, 1.1670075045446946, 0, 0, 0],
[1163, 2, 0.014481760844263846, 0.7240880422131923, 0, 0, 0],
[1164, 2, 0.01586368621264448, 0.793184310632224, 0, 0, 0],
[1165, 2, 0.0025257844262807964, 0.12628922131403983, 0, 0, 0],
[1166, 2, 0.005301588846150501, 0.26507944230752506, 0, 0, 0],
[1167, 3, 0.00032173361521807824, 0.016086680760903912, 2.22, 61.69, 0.004502],
[1168, 3, 8.56746647323757e-05, 0.004283733236618785, 2.22, 61.69, 0.004502],
[1169, 3, 0.00017327803824915608, 0.008663901912457804, 2.22, 61.69, 0.004502],
[1170, 3, 1.6933420442211857e-05, 0.000846671022110593, 2.22, 61.69, 0.004502],
[1173, 2, 0.01618626952698487, 0.8093134763492436, 0, 0, 0],
[1174, 3, 8.021928882473966e-05, 0.004010964441236983, 2.22, 61.69, 0.004502],
[1175, 3, 5.445989361520192e-05, 0.002722994680760096, 2.22, 61.69, 0.004502],
[1176, 3, 1.4783581244732665e-05, 0.0007391790622366333, 2.22, 61.69, 0.004502],
[1177, 3, 0.0017745146198091144, 0.08872573099045572, 2.22, 61.69, 0.004502],
[1178, 3, 0.0001923728167601116, 0.00961864083800558, 2.22, 61.69, 0.004502],
[1179, 3, 8.316119408334767e-05, 0.004158059704167384, 2.22, 61.69, 0.004502],
[1180, 3, 4.3834108298364086e-05, 0.002191705414918204, 2.22, 61.69, 0.004502],
[1181, 2, 0.00545834972439398, 0.272917486219699, 0, 0, 0],
[1182, 2, 0.006322880792722177, 0.3161440396361089, 0, 0, 0],
[1183, 3, 0.00177138301503702, 0.08856915075185101, 2.22, 61.69, 0.004502],
[1184, 3, 0.0002382585530365376, 0.01191292765182688, 2.22, 61.69, 0.004502],
[1185, 3, 0.0007221796423758263, 0.036108982118791315, 2.22, 61.69, 0.004502],
[1186, 3, 0.0024774929167619207, 0.12387464583809603, 2.22, 61.69, 0.004502],
[1187, 3, 0.0006248151564821885, 0.031240757824109424, 2.22, 61.69, 0.004502],
[1188, 2, 0.011440868435801076, 0.5720434217900537, 0, 0, 0],
[1189, 3, 0.001075762722956362, 0.0537881361478181, 2.22, 61.69, 0.004502],
[1190, 2, 0.005589994050160443, 0.2794997025080222, 0, 0, 0],
[1191, 2, 0.0018543296854580205, 0.09271648427290104, 0, 0, 0],
[1196, 2, 0.010230349597894291, 0.5115174798947145, 0, 0, 0],
[1197, 2, 0.005767282789943071, 0.2883641394971536, 0, 0, 0],
[1198, 3, 0.002534966273924786, 0.12674831369623932, 2.22, 61.69, 0.004502],
[1199, 2, 0.012822920004466005, 0.6411460002233003, 0, 0, 0],
[1200, 2, 0.0035658606694853635, 0.1782930334742682, 0, 0, 0],
[1203, 2, 0.004628517197038981, 0.23142585985194902, 0, 0, 0],
[1204, 3, 0.0023050069174568553, 0.11525034587284279, 2.22, 61.69, 0.004502],
[1211, 3, 0.00045660111641118554, 0.022830055820559275, 2.22, 61.69, 0.004502],
[1212, 2, 0.002310697165483375, 0.11553485827416875, 0, 0, 0],
[1213, 2, 0.001571453208551938, 0.0785726604275969, 0, 0, 0],
[1214, 3, 0.00011420293137312512, 0.005710146568656256, 2.22, 61.69, 0.004502],
[1215, 3, 6.379928530672539e-05, 0.0031899642653362694, 2.22, 61.69, 0.004502],
[1216, 2, 0.001869892681863531, 0.09349463409317656, 0, 0, 0],
[1217, 3, 0.0009267444929459551, 0.04633722464729775, 2.22, 61.69, 0.004502],
[1218, 3, 2.5227972538599323e-05, 0.0012613986269299662, 2.22, 61.69, 0.004502],
[1219, 3, 0.0007855588922898729, 0.03927794461449365, 2.22, 61.69, 0.004502],
[1220, 3, 0.0013820054686401347, 0.06910027343200674, 2.22, 61.69, 0.004502],
[1221, 2, 0.015352878695497882, 0.7676439347748941, 0, 0, 0],
[1222, 2, 0.006253768855699434, 0.3126884427849717, 0, 0, 0],
[1225, 3, 0.0010446814701628646, 0.05223407350814323, 2.22, 61.69, 0.004502],
[1226, 3, 0.00014078918131144803, 0.007039459065572402, 2.22, 61.69, 0.004502],
[1228, 3, 7.674774797726922e-05, 0.003837387398863461, 2.22, 61.69, 0.004502],
[1229, 2, 0.00326230849376, 0.16311542468800003, 0, 0, 0],
[1230, 3, 4.264866012739944e-05, 0.002132433006369972, 2.22, 61.69, 0.004502],
[1231, 3, 0.0011074075337247616, 0.05537037668623808, 2.22, 61.69, 0.004502],
[1232, 2, 0.0025289299564359583, 0.12644649782179793, 0, 0, 0],
[1233, 2, 0.03662908231521014, 1.831454115760507, 0, 0, 0],
[1235, 3, 0.0005753349157073776, 0.028766745785368877, 2.22, 61.69, 0.004502],
[1236, 2, 0.005234608320670995, 0.26173041603354974, 0, 0, 0],
[1237, 3, 0.0005991995096878405, 0.02995997548439202, 2.22, 61.69, 0.004502],
[1238, 2, 0.005358277784741974, 0.26791388923709875, 0, 0, 0],
[1239, 3, 0.0001443666373276477, 0.007218331866382386, 2.22, 61.69, 0.004502],
[1240, 2, 0.01429475266593171, 0.7147376332965855, 0, 0, 0],
[1241, 2, 0.012239428692174842, 0.6119714346087421, 0, 0, 0],
[1242, 3, 0.0009261376778324836, 0.04630688389162418, 2.22, 61.69, 0.004502],
[1243, 2, 0.0030479476517051274, 0.15239738258525637, 0, 0, 0],
[1244, 2, 0.020592901244747865, 1.0296450622373932, 0, 0, 0],
[1245, 3, 0.0003407395317026344, 0.017036976585131723, 2.22, 61.69, 0.004502],
[1246, 2, 0.003636870278584459, 0.18184351392922293, 0, 0, 0],
[1247, 3, 0.0005536878435284997, 0.027684392176424988, 2.22, 61.69, 0.004502],
[1248, 2, 0.005854245631350222, 0.2927122815675111, 0, 0, 0],
[1249, 2, 0.0029138707379534994, 0.14569353689767497, 0, 0, 0],
[1250, 3, 0.0011051662697331927, 0.055258313486659626, 2.22, 61.69, 0.004502],
[1251, 3, 0.0006892543892280731, 0.034462719461403654, 2.22, 61.69, 0.004502],
[1252, 3, 0.0004933226435696849, 0.02466613217848425, 2.22, 61.69, 0.004502],
[1253, 2, 0.0033963585596517073, 0.16981792798258535, 0, 0, 0],
[1254, 2, 0.005238024431161238, 0.2619012215580619, 0, 0, 0],
[1255, 3, 0.0002152231180033463, 0.010761155900167315, 2.22, 61.69, 0.004502],
[1256, 3, 0.0008829260686159954, 0.04414630343079977, 2.22, 61.69, 0.004502],
[1257, 2, 0.005416876706065561, 0.2708438353032781, 0, 0, 0],
[1258, 2, 0.014991588973313675, 0.7495794486656838, 0, 0, 0],
[1259, 2, 0.006546118673323141, 0.32730593366615707, 0, 0, 0],
[1260, 3, 0.0008510469735902338, 0.042552348679511694, 2.22, 61.69, 0.004502],
[1261, 2, 0.005305411264712167, 0.2652705632356084, 0, 0, 0],
[1267, 3, 0.0012287427693400252, 0.06143713846700125, 2.22, 61.69, 0.004502],
[1274, 2, 0.001691611211165477, 0.08458056055827386, 0, 0, 0],
[1275, 2, 0.0038666125605823546, 0.19333062802911774, 0, 0, 0],
[1276, 3, 0.0009102374698035218, 0.04551187349017608, 2.22, 61.69, 0.004502],
[1277, 2, 0.0023965297543892265, 0.11982648771946133, 0, 0, 0],
[1278, 2, 0.006398316507252847, 0.31991582536264235, 0, 0, 0],
[1282, 3, 0.0001105941762774276, 0.00552970881387138, 2.22, 61.69, 0.004502],
[1283, 2, 0.08261824948992594, 4.130912474496298, 0, 0, 0],
[1287, 2, 0.003083233730049012, 0.1541616865024506, 0, 0, 0],
[1288, 2, 0.004640611077226182, 0.23203055386130914, 0, 0, 0],
[1289, 2, 0.004963561654090838, 0.24817808270454192, 0, 0, 0],
[1290, 3, 0.0001244867117459489, 0.006224335587297446, 2.22, 61.69, 0.004502],
[1291, 2, 0.003736373434735334, 0.1868186717367667, 0, 0, 0],
[1292, 3, 0.0011143622294130919, 0.05571811147065459, 2.22, 61.69, 0.004502],
[1293, 3, 8.952966571388897e-05, 0.004476483285694448, 2.22, 61.69, 0.004502],
[1294, 3, 0.00020936993212911583, 0.010468496606455793, 2.22, 61.69, 0.004502],
[1295, 3, 0.0002089734159756435, 0.010448670798782174, 2.22, 61.69, 0.004502],
[1300, 3, 0.0007801536738897055, 0.03900768369448528, 2.22, 61.69, 0.004502],
[1301, 2, 0.0019439262202234247, 0.09719631101117124, 0, 0, 0],
[1302, 3, 0.00012789433882958004, 0.006394716941479003, 2.22, 61.69, 0.004502],
[1303, 3, 0.00010996863751682274, 0.005498431875841137, 2.22, 61.69, 0.004502],
[1306, 3, 0.00011631130798083146, 0.005815565399041573, 2.22, 61.69, 0.004502],
[1307, 3, 1.9031130574577255e-05, 0.0009515565287288628, 2.22, 61.69, 0.004502],
[1308, 3, 0.0001224932857995621, 0.006124664289978106, 2.22, 61.69, 0.004502],
[1312, 2, 0.016696303623916272, 0.8348151811958137, 0, 0, 0],
[1317, 3, 0.0015252502049763412, 0.07626251024881707, 2.22, 61.69, 0.004502],
[1319, 3, 0.001127343871228203, 0.05636719356141015, 2.22, 61.69, 0.004502],
[1323, 2, 0.012675857799799822, 0.6337928899899912, 0, 0, 0],
[1326, 2, 0.003288096915491701, 0.16440484577458506, 0, 0, 0],
[1327, 2, 0.0032338308031027566, 0.16169154015513784, 0, 0, 0],
[1328, 3, 0.0010226241895011407, 0.05113120947505704, 2.22, 61.69, 0.004502],
[1331, 3, 1.841349064624893e-05, 0.0009206745323124464, 2.22, 61.69, 0.004502],
[1336, 3, 0.0008820603397680993, 0.04410301698840497, 2.22, 61.69, 0.004502],
[1337, 2, 0.007722987880773172, 0.3861493940386586, 0, 0, 0],
[1339, 3, 0.0006387594087649589, 0.03193797043824795, 2.22, 61.69, 0.004502],
[1340, 2, 0.004462598113304154, 0.22312990566520774, 0, 0, 0],
[1346, 2, 0.010970124373846759, 0.548506218692338, 0, 0, 0],
[1348, 3, 0.0014456315404578254, 0.07228157702289127, 2.22, 61.69, 0.004502],
[1349, 3, 0.0026962338610516797, 0.13481169305258398, 2.22, 61.69, 0.004502],
[1356, 2, 0.0034369953484322496, 0.17184976742161248, 0, 0, 0],
[1357, 2, 0.002662266539247354, 0.13311332696236772, 0, 0, 0],
[1359, 2, 0.0023306710292170787, 0.11653355146085395, 0, 0, 0],
[1360, 3, 0.0010909105792324338, 0.054545528961621695, 2.22, 61.69, 0.004502],
[1361, 2, 0.0040238936307783425, 0.20119468153891715, 0, 0, 0],
[1362, 2, 0.005036121783141224, 0.2518060891570612, 0, 0, 0],
[1363, 3, 1.053265313635017e-06, 5.266326568175085e-05, 2.22, 61.69, 0.004502],
[1364, 3, 1.7153235992295212e-06, 8.576617996147605e-05, 2.22, 61.69, 0.004502],
[1365, 3, 1.4382678391388228e-08, 7.191339195694115e-07, 2.22, 61.69, 0.004502],
[1366, 3, 4.567454523924795e-05, 0.0022837272619623972, 2.22, 61.69, 0.004502],
[1372, 2, 0.005918410111015705, 0.29592050555078525, 0, 0, 0],
[1373, 3, 0.0010699135939801641, 0.05349567969900822, 2.22, 61.69, 0.004502],
[1374, 2, 0.006889508467327262, 0.3444754233663631, 0, 0, 0],
[1375, 2, 0.003897629175102736, 0.1948814587551368, 0, 0, 0],
[1376, 2, 0.011218109707548912, 0.5609054853774457, 0, 0, 0],
[1377, 2, 0.01492085689824784, 0.7460428449123921, 0, 0, 0],
[1378, 2, 0.014711861690612471, 0.7355930845306237, 0, 0, 0],
[1379, 3, 4.570772978988336e-05, 0.0022853864894941682, 2.22, 61.69, 0.004502],
[1380, 3, 7.724465320438908e-05, 0.003862232660219454, 2.22, 61.69, 0.004502],
[1381, 3, 5.9312910906981426e-05, 0.0029656455453490713, 2.22, 61.69, 0.004502],
[1382, 2, 0.005563903887757258, 0.27819519438786294, 0, 0, 0],
[1383, 2, 0.00682767638336331, 0.3413838191681655, 0, 0, 0],
[1384, 3, 0.0002972463393517766, 0.014862316967588829, 2.22, 61.69, 0.004502],
[1385, 3, 7.763953914385516e-06, 0.0003881976957192759, 2.22, 61.69, 0.004502],
[1386, 3, 4.2899112828393286e-05, 0.002144955641419664, 2.22, 61.69, 0.004502],
[1387, 3, 0.00022240699424911273, 0.011120349712455638, 2.22, 61.69, 0.004502],
[1388, 3, 5.909025672850305e-05, 0.0029545128364251525, 2.22, 61.69, 0.004502],
[1389, 3, 1.3594135764164036e-05, 0.0006797067882082019, 2.22, 61.69, 0.004502],
[1390, 3, 0.00023763846235409512, 0.011881923117704758, 2.22, 61.69, 0.004502],
[1391, 3, 3.321367742134543e-05, 0.0016606838710672715, 2.22, 61.69, 0.004502],
[1392, 3, 0.0012290826914265437, 0.06145413457132718, 2.22, 61.69, 0.004502],
[1393, 3, 8.111443639320659e-05, 0.00405572181966033, 2.22, 61.69, 0.004502],
[1394, 3, 6.656099436847732e-05, 0.0033280497184238656, 2.22, 61.69, 0.004502],
[1395, 3, 4.381412847320234e-06, 0.00021907064236601173, 2.22, 61.69, 0.004502],
[1396, 3, 1.3808034609766036e-06, 6.904017304883018e-05, 2.22, 61.69, 0.004502],
[1397, 3, 0.0015969317375463513, 0.07984658687731756, 2.22, 61.69, 0.004502],
[1398, 3, 0.00017695743260373348, 0.008847871630186674, 2.22, 61.69, 0.004502],
[1399, 3, 0.0011375222056992432, 0.05687611028496216, 2.22, 61.69, 0.004502],
[1400, 3, 7.618867997042728e-05, 0.0038094339985213638, 2.22, 61.69, 0.004502],
[1401, 2, 0.005687529053514607, 0.28437645267573036, 0, 0, 0],
[1402, 3, 0.001676149990745289, 0.08380749953726446, 2.22, 61.69, 0.004502],
[1403, 2, 0.007617262031172502, 0.38086310155862513, 0, 0, 0],
[1404, 2, 0.008581667499251882, 0.42908337496259413, 0, 0, 0],
[1405, 3, 0.0018812625008740895, 0.09406312504370447, 2.22, 61.69, 0.004502],
[1406, 3, 0.0006852566793279422, 0.03426283396639711, 2.22, 61.69, 0.004502],
[1407, 3, 9.408131582260726e-06, 0.00047040657911303626, 2.22, 61.69, 0.004502],
[1408, 3, 0.001981558589185328, 0.09907792945926643, 2.22, 61.69, 0.004502],
[1409, 3, 0.0005556437532243559, 0.027782187661217796, 2.22, 61.69, 0.004502],
[1410, 3, 0.0018249000205853422, 0.09124500102926711, 2.22, 61.69, 0.004502],
[1411, 3, 0.002128337887273, 0.10641689436365001, 2.22, 61.69, 0.004502],
[1412, 3, 0.0001556187955145351, 0.007780939775726756, 2.22, 61.69, 0.004502],
[1413, 3, 0.00014666682461596226, 0.007333341230798113, 2.22, 61.69, 0.004502],
[1414, 3, 0.000658771107384773, 0.032938555369238655, 2.22, 61.69, 0.004502],
[1418, 2, 0.004554955356465112, 0.2277477678232556, 0, 0, 0],
[1419, 3, 0.0015414725788113375, 0.07707362894056687, 2.22, 61.69, 0.004502],
[1421, 3, 0.00017979168856692174, 0.008989584428346088, 2.22, 61.69, 0.004502],
[1422, 3, 0.00012256633129127437, 0.006128316564563719, 2.22, 61.69, 0.004502],
[1423, 3, 4.9296505077127586e-05, 0.0024648252538563794, 2.22, 61.69, 0.004502],
[1424, 2, 0.01394783725195249, 0.6973918625976245, 0, 0, 0],
[1425, 3, 0.0013602274146640447, 0.06801137073320224, 2.22, 61.69, 0.004502],
[1426, 2, 0.0041334084484743, 0.20667042242371503, 0, 0, 0],
[1427, 2, 0.019959940478923573, 0.9979970239461788, 0, 0, 0],
[1428, 2, 0.013355559786648664, 0.6677779893324334, 0, 0, 0],
[1431, 2, 0.014493414492796078, 0.724670724639804, 0, 0, 0],
[1432, 3, 0.0007676953741931287, 0.03838476870965644, 2.22, 61.69, 0.004502],
[1433, 2, 0.08207564315805406, 4.103782157902703, 0, 0, 0],
[1434, 2, 0.006330547929406013, 0.3165273964703006, 0, 0, 0],
[1435, 2, 0.005520334862536408, 0.2760167431268204, 0, 0, 0],
[1436, 2, 0.006266510483771511, 0.31332552418857557, 0, 0, 0],
[1437, 2, 0.006731984814882108, 0.3365992407441054, 0, 0, 0],
[1438, 2, 0.0161133113991622, 0.8056655699581102, 0, 0, 0],
[1439, 2, 0.0063091033600462575, 0.3154551680023129, 0, 0, 0],
[1440, 3, 3.334110448446746e-05, 0.0016670552242233731, 2.22, 61.69, 0.004502],
[1443, 2, 0.006557506818224797, 0.3278753409112398, 0, 0, 0],
[1446, 2, 0.024519578499182584, 1.2259789249591293, 0, 0, 0],
[1447, 2, 0.0023268276390894026, 0.11634138195447014, 0, 0, 0],
[1448, 3, 0.00047896583949883246, 0.023948291974941624, 2.22, 61.69, 0.004502],
[1449, 2, 0.006075750962706547, 0.3037875481353274, 0, 0, 0],
[1450, 2, 0.0037724056227270084, 0.18862028113635043, 0, 0, 0],
[1451, 2, 0.0043416728967246255, 0.21708364483623127, 0, 0, 0],
[1452, 3, 0.0015322750739690742, 0.0766137536984537, 2.22, 61.69, 0.004502],
[1453, 2, 0.0016458121717638546, 0.08229060858819273, 0, 0, 0],
[1454, 2, 0.004682929067992207, 0.2341464533996104, 0, 0, 0],
[1455, 3, 4.166284213856912e-05, 0.0020831421069284557, 2.22, 61.69, 0.004502],
[1456, 2, 0.0031865889687578697, 0.15932944843789354, 0, 0, 0],
[1457, 3, 0.00012749408723576006, 0.006374704361788003, 2.22, 61.69, 0.004502],
[1458, 3, 1.5673534819523866e-05, 0.0007836767409761935, 2.22, 61.69, 0.004502],
[1459, 3, 0.00031178936740549625, 0.015589468370274815, 2.22, 61.69, 0.004502],
[1460, 2, 0.003376889830190501, 0.16884449150952507, 0, 0, 0],
[1461, 3, 0.001142843079861875, 0.05714215399309376, 2.22, 61.69, 0.004502],
[1462, 3, 0.00015295973435731913, 0.007647986717865956, 2.22, 61.69, 0.004502],
[1463, 3, 4.5276834778775515e-05, 0.002263841738938776, 2.22, 61.69, 0.004502],
[1464, 2, 0.006606826758650888, 0.33034133793254444, 0, 0, 0],
[1465, 3, 0.0003374045759652472, 0.01687022879826236, 2.22, 61.69, 0.004502],
[1466, 3, 0.0003619193984034768, 0.01809596992017384, 2.22, 61.69, 0.004502],
[1467, 3, 0.00013344536897072216, 0.006672268448536108, 2.22, 61.69, 0.004502],
[1468, 3, 0.0015144656821575462, 0.0757232841078773, 2.22, 61.69, 0.004502],
[1469, 2, 0.0033435340956597163, 0.16717670478298582, 0, 0, 0],
[1470, 2, 0.005027084884666319, 0.2513542442333159, 0, 0, 0],
[1471, 2, 0.010132763321185349, 0.5066381660592674, 0, 0, 0],
[1472, 3, 0.0007626820845032627, 0.03813410422516314, 2.22, 61.69, 0.004502],
[1473, 3, 0.0005323801851315335, 0.026619009256576683, 2.22, 61.69, 0.004502],
[1474, 3, 8.905977123682595e-05, 0.004452988561841298, 2.22, 61.69, 0.004502],
[1475, 3, 2.4884191103347185e-05, 0.0012442095551673594, 2.22, 61.69, 0.004502],
[1476, 2, 0.015946059282369706, 0.7973029641184852, 0, 0, 0],
[1477, 3, 0.0007376196482685025, 0.03688098241342513, 2.22, 61.69, 0.004502],
[1482, 3, 0.0004523453643744782, 0.02261726821872391, 2.22, 61.69, 0.004502],
[1483, 3, 0.0002291607516312977, 0.011458037581564884, 2.22, 61.69, 0.004502],
[1484, 3, 1.9041073525508303e-06, 9.520536762754152e-05, 2.22, 61.69, 0.004502],
[1485, 3, 3.5876538426778735e-05, 0.0017938269213389369, 2.22, 61.69, 0.004502],
[1486, 3, 0.00018457774197472868, 0.009228887098736434, 2.22, 61.69, 0.004502],
[1489, 3, 7.571817467557017e-06, 0.00037859087337785094, 2.22, 61.69, 0.004502],
[1490, 2, 0.04981318633597547, 2.4906593167987734, 0, 0, 0],
[1491, 2, 0.0030920025676765685, 0.15460012838382842, 0, 0, 0],
[1492, 2, 0.007177601132582883, 0.35888005662914413, 0, 0, 0],
[1493, 2, 0.0027270697137500854, 0.13635348568750427, 0, 0, 0],
[1494, 2, 0.016524815200932762, 0.8262407600466383, 0, 0, 0],
[1495, 2, 0.002876525951460364, 0.1438262975730182, 0, 0, 0],
[1500, 3, 5.0840365097147265e-06, 0.0002542018254857363, 2.22, 61.69, 0.004502],
[1501, 3, 0.00020699592758045344, 0.010349796379022674, 2.22, 61.69, 0.004502],
[1503, 3, 0.001165185399438024, 0.058259269971901194, 2.22, 61.69, 0.004502],
[1504, 2, 0.004794675188738244, 0.23973375943691222, 0, 0, 0],
[1512, 2, 0.001625501569891568, 0.08127507849457842, 0, 0, 0],
[1513, 3, 0.0006523381548315222, 0.03261690774157611, 2.22, 61.69, 0.004502],
[1518, 3, 3.7867310041709484e-05, 0.0018933655020854743, 2.22, 61.69, 0.004502],
[1519, 3, 2.6282109502563467e-06, 0.00013141054751281735, 2.22, 61.69, 0.004502]
])
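# NOTE (added, hedged): ppc["branch_switch"] below is also a non-standard field. Each row
# has 3 columns, apparently [element/bus index, connected bus index, switch flag (0/1)];
# the exact semantics are an assumption and are not documented in this file.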
ppc["branch_switch"] = array([
[586, 1, 0 ],
[589, 108, 0 ],
[590, 108, 0 ],
[593, 112, 0 ],
[594, 114, 0 ],
[595, 115, 0 ],
[597, 118, 0 ],
[598, 118, 0 ],
[599, 119, 0 ],
[601, 119, 0 ],
[602, 121, 0 ],
[603, 526, 0 ],
[607, 127, 0 ],
[608, 127, 0 ],
[609, 529, 0 ],
[610, 530, 0 ],
[612, 493, 0 ],
[613, 130, 0 ],
[614, 130, 0 ],
[616, 132, 0 ],
[617, 133, 0 ],
[618, 133, 0 ],
[619, 134, 0 ],
[621, 136, 0 ],
[623, 139, 0 ],
[624, 14, 0 ],
[628, 142, 0 ],
[629, 145, 0 ],
[631, 145, 0 ],
[632, 145, 0 ],
[637, 148, 0 ],
[638, 149, 0 ],
[639, 150, 0 ],
[640, 153, 0 ],
[641, 155, 0 ],
[642, 533, 0 ],
[643, 534, 0 ],
[646, 536, 0 ],
[647, 536, 0 ],
[650, 166, 0 ],
[652, 167, 0 ],
[655, 170, 0 ],
[657, 174, 0 ],
[658, 175, 0 ],
[661, 177, 0 ],
[662, 178, 0 ],
[663, 178, 0 ],
[666, 180, 0 ],
[668, 183, 0 ],
[670, 183, 0 ],
[672, 185, 0 ],
[676, 19, 0 ],
[677, 190, 0 ],
[678, 194, 0 ],
[679, 196, 0 ],
[681, 197, 0 ],
[683, 200, 0 ],
[687, 202, 0 ],
[689, 204, 0 ],
[691, 209, 0 ],
[693, 21, 0 ],
[694, 21, 0 ],
[695, 210, 0 ],
[696, 211, 0 ],
[697, 211, 0 ],
[698, 212, 0 ],
[699, 213, 0 ],
[700, 214, 0 ],
[701, 215, 0 ],
[702, 215, 0 ],
[704, 217, 0 ],
[705, 217, 0 ],
[707, 219, 0 ],
[708, 221, 0 ],
[711, 224, 0 ],
[713, 225, 0 ],
[714, 225, 0 ],
[716, 226, 0 ],
[717, 227, 0 ],
[719, 229, 0 ],
[721, 545, 0 ],
[722, 545, 0 ],
[723, 235, 0 ],
[724, 238, 0 ],
[725, 239, 0 ],
[726, 240, 0 ],
[727, 243, 0 ],
[728, 244, 0 ],
[730, 547, 0 ],
[731, 548, 0 ],
[732, 247, 0 ],
[733, 549, 0 ],
[735, 253, 0 ],
[736, 256, 0 ],
[737, 256, 0 ],
[738, 258, 0 ],
[739, 264, 0 ],
[741, 264, 0 ],
[742, 264, 0 ],
[743, 500, 0 ],
[745, 273, 0 ],
[746, 273, 0 ],
[747, 273, 0 ],
[748, 274, 0 ],
[749, 274, 0 ],
[750, 557, 0 ],
[758, 286, 0 ],
[760, 287, 0 ],
[761, 288, 0 ],
[762, 289, 0 ],
[763, 560, 0 ],
[765, 560, 0 ],
[767, 292, 0 ],
[769, 293, 0 ],
[771, 297, 0 ],
[772, 3, 0 ],
[774, 300, 0 ],
[775, 300, 0 ],
[776, 300, 0 ],
[777, 300, 0 ],
[778, 300, 0 ],
[779, 302, 0 ],
[781, 303, 0 ],
[784, 563, 0 ],
[785, 501, 0 ],
[786, 31, 0 ],
[787, 308, 0 ],
[788, 311, 0 ],
[789, 565, 0 ],
[791, 314, 0 ],
[792, 316, 0 ],
[793, 318, 0 ],
[794, 319, 0 ],
[795, 319, 0 ],
[796, 567, 0 ],
[798, 324, 0 ],
[800, 326, 0 ],
[801, 327, 0 ],
[802, 327, 0 ],
[805, 328, 0 ],
[806, 328, 0 ],
[808, 329, 0 ],
[809, 329, 0 ],
[811, 568, 0 ],
[814, 570, 0 ],
[816, 335, 0 ],
[817, 571, 0 ],
[818, 34, 0 ],
[821, 338, 0 ],
[822, 339, 0 ],
[825, 339, 0 ],
[826, 339, 0 ],
[830, 345, 0 ],
[833, 348, 0 ],
[834, 572, 0 ],
[835, 572, 0 ],
[836, 572, 0 ],
[837, 350, 0 ],
[839, 350, 0 ],
[840, 573, 0 ],
[841, 573, 0 ],
[843, 352, 0 ],
[844, 352, 0 ],
[845, 356, 0 ],
[848, 574, 0 ],
[849, 574, 0 ],
[850, 574, 0 ],
[851, 575, 0 ],
[852, 361, 0 ],
[853, 362, 0 ],
[855, 363, 0 ],
[856, 363, 0 ],
[857, 365, 0 ],
[858, 368, 0 ],
[859, 368, 0 ],
[860, 371, 0 ],
[862, 372, 0 ],
[863, 374, 0 ],
[864, 374, 0 ],
[865, 375, 0 ],
[866, 376, 0 ],
[867, 376, 0 ],
[869, 503, 0 ],
[870, 503, 0 ],
[872, 378, 0 ],
[873, 576, 0 ],
[874, 576, 0 ],
[875, 381, 0 ],
[876, 578, 0 ],
[877, 578, 0 ],
[881, 388, 0 ],
[882, 388, 0 ],
[883, 388, 0 ],
[885, 393, 0 ],
[886, 394, 0 ],
[888, 397, 0 ],
[889, 397, 0 ],
[890, 40, 0 ],
[895, 580, 0 ],
[896, 581, 0 ],
[897, 403, 0 ],
[898, 403, 0 ],
[899, 405, 0 ],
[900, 405, 0 ],
[902, 405, 0 ],
[903, 406, 0 ],
[905, 413, 0 ],
[906, 414, 0 ],
[907, 583, 0 ],
[909, 417, 0 ],
[913, 422, 0 ],
[915, 423, 0 ],
[917, 43, 0 ],
[918, 424, 0 ],
[920, 428, 0 ],
[921, 428, 0 ],
[922, 429, 0 ],
[923, 432, 0 ],
[924, 433, 0 ],
[925, 44, 0 ],
[928, 435, 0 ],
[931, 439, 0 ],
[934, 45, 0 ],
[935, 45, 0 ],
[936, 445, 0 ],
[937, 447, 0 ],
[939, 450, 0 ],
[940, 451, 0 ],
[942, 458, 0 ],
[944, 458, 0 ],
[945, 459, 0 ],
[948, 462, 0 ],
[950, 462, 0 ],
[952, 47, 0 ],
[956, 478, 0 ],
[957, 478, 0 ],
[958, 478, 0 ],
[959, 478, 0 ],
[960, 479, 0 ],
[963, 481, 0 ],
[965, 49, 0 ],
[966, 49, 0 ],
[967, 49, 0 ],
[968, 486, 0 ],
[969, 486, 0 ],
[971, 51, 0 ],
[972, 506, 0 ],
[973, 506, 0 ],
[975, 58, 0 ],
[976, 58, 0 ],
[977, 59, 0 ],
[978, 491, 0 ],
[981, 62, 0 ],
[982, 62, 0 ],
[983, 62, 0 ],
[984, 63, 0 ],
[985, 63, 0 ],
[986, 64, 0 ],
[987, 65, 0 ],
[988, 66, 0 ],
[990, 67, 0 ],
[993, 67, 0 ],
[994, 67, 0 ],
[995, 509, 0 ],
[996, 510, 0 ],
[997, 510, 0 ],
[998, 70, 0 ],
[999, 70, 0 ],
[1000, 71, 0 ],
[1002, 71, 0 ],
[1003, 72, 0 ],
[1007, 511, 0 ],
[1008, 75, 0 ],
[1010, 79, 0 ],
[1011, 79, 0 ],
[1012, 81, 0 ],
[1018, 514, 0 ],
[1019, 514, 0 ],
[1023, 515, 0 ],
[1025, 518, 0 ],
[1026, 518, 0 ],
[1027, 218, 0 ],
[1028, 221, 0 ],
[1029, 268, 0 ],
[1030, 269, 0 ],
[1031, 498, 0 ],
[1032, 1, 0 ],
[1033, 3, 0 ],
[1034, 4, 0 ],
[1035, 6, 0 ],
[1036, 7, 0 ],
[1037, 8, 0 ],
[1038, 9, 0 ],
[1039, 11, 0 ],
[1041, 16, 0 ],
[1042, 17, 0 ],
[1044, 21, 0 ],
[1046, 25, 0 ],
[1047, 27, 0 ],
[1048, 28, 0 ],
[1049, 29, 0 ],
[1050, 31, 0 ],
[1051, 33, 0 ],
[1052, 34, 0 ],
[1053, 35, 0 ],
[1054, 36, 0 ],
[1055, 38, 0 ],
[1056, 39, 0 ],
[1057, 40, 0 ],
[1058, 41, 0 ],
[1059, 43, 0 ],
[1060, 44, 0 ],
[1061, 45, 0 ],
[1062, 47, 0 ],
[1063, 48, 0 ],
[1064, 49, 0 ],
[1065, 50, 0 ],
[1066, 51, 0 ],
[1067, 53, 0 ],
[1072, 59, 0 ],
[1073, 60, 0 ],
[1074, 62, 0 ],
[1077, 65, 0 ],
[1079, 67, 0 ],
[1080, 70, 0 ],
[1081, 71, 0 ],
[1082, 72, 0 ],
[1083, 73, 0 ],
[1084, 75, 0 ],
[1085, 76, 0 ],
[1086, 77, 0 ],
[1087, 79, 0 ],
[1088, 80, 0 ],
[1089, 81, 0 ],
[1090, 82, 0 ],
[1091, 83, 0 ],
[1092, 84, 0 ],
[1093, 85, 0 ],
[1094, 88, 0 ],
[1095, 89, 0 ],
[1096, 90, 0 ],
[1097, 91, 0 ],
[1098, 92, 0 ],
[1099, 93, 0 ],
[1101, 98, 0 ],
[1102, 101, 0 ],
[1103, 102, 0 ],
[1104, 103, 0 ],
[1105, 108, 0 ],
[1106, 109, 0 ],
[1107, 110, 0 ],
[1108, 111, 0 ],
[1109, 112, 0 ],
[1110, 113, 0 ],
[1111, 114, 0 ],
[1112, 115, 0 ],
[1113, 116, 0 ],
[1114, 118, 0 ],
[1115, 119, 0 ],
[1116, 121, 0 ],
[1117, 122, 0 ],
[1118, 126, 0 ],
[1119, 127, 0 ],
[1120, 130, 0 ],
[1121, 131, 0 ],
[1122, 132, 0 ],
[1123, 133, 0 ],
[1124, 134, 0 ],
[1125, 135, 0 ],
[1126, 136, 0 ],
[1127, 137, 0 ],
[1128, 139, 0 ],
[1129, 140, 0 ],
[1130, 141, 0 ],
[1131, 142, 0 ],
[1132, 144, 0 ],
[1133, 145, 0 ],
[1134, 146, 0 ],
[1135, 147, 0 ],
[1136, 148, 0 ],
[1137, 149, 0 ],
[1138, 150, 0 ],
[1139, 151, 0 ],
[1140, 152, 0 ],
[1141, 153, 0 ],
[1142, 154, 0 ],
[1143, 155, 0 ],
[1144, 158, 0 ],
[1145, 161, 0 ],
[1146, 162, 0 ],
[1147, 163, 0 ],
[1148, 164, 0 ],
[1149, 166, 0 ],
[1150, 167, 0 ],
[1151, 168, 0 ],
[1152, 169, 0 ],
[1153, 170, 0 ],
[1154, 171, 0 ],
[1155, 172, 0 ],
[1156, 173, 0 ],
[1157, 174, 0 ],
[1158, 175, 0 ],
[1159, 176, 0 ],
[1160, 177, 0 ],
[1161, 178, 0 ],
[1162, 179, 0 ],
[1163, 180, 0 ],
[1164, 181, 0 ],
[1165, 182, 0 ],
[1166, 183, 0 ],
[1167, 185, 0 ],
[1168, 186, 0 ],
[1169, 187, 0 ],
[1170, 188, 0 ],
[1173, 192, 0 ],
[1174, 193, 0 ],
[1175, 194, 0 ],
[1176, 196, 0 ],
[1177, 197, 0 ],
[1178, 198, 0 ],
[1179, 199, 0 ],
[1180, 200, 0 ],
[1181, 202, 0 ],
[1182, 203, 0 ],
[1183, 204, 0 ],
[1184, 205, 0 ],
[1185, 206, 0 ],
[1186, 207, 0 ],
[1187, 208, 0 ],
[1188, 209, 0 ],
[1189, 210, 0 ],
[1190, 211, 0 ],
[1191, 212, 0 ],
[1196, 217, 0 ],
[1197, 218, 0 ],
[1198, 219, 0 ],
[1199, 221, 0 ],
[1200, 222, 0 ],
[1203, 225, 0 ],
[1204, 226, 0 ],
[1211, 237, 0 ],
[1212, 238, 0 ],
[1213, 239, 0 ],
[1214, 240, 0 ],
[1215, 241, 0 ],
[1216, 242, 0 ],
[1217, 243, 0 ],
[1218, 244, 0 ],
[1219, 247, 0 ],
[1220, 251, 0 ],
[1221, 252, 0 ],
[1222, 253, 0 ],
[1225, 256, 0 ],
[1226, 257, 0 ],
[1228, 260, 0 ],
[1229, 263, 0 ],
[1230, 264, 0 ],
[1231, 266, 0 ],
[1232, 267, 0 ],
[1233, 268, 0 ],
[1235, 271, 0 ],
[1236, 272, 0 ],
[1237, 273, 0 ],
[1238, 274, 0 ],
[1239, 275, 0 ],
[1240, 276, 0 ],
[1241, 278, 0 ],
[1242, 281, 0 ],
[1243, 282, 0 ],
[1244, 283, 0 ],
[1245, 284, 0 ],
[1246, 285, 0 ],
[1247, 286, 0 ],
[1248, 287, 0 ],
[1249, 288, 0 ],
[1250, 289, 0 ],
[1251, 291, 0 ],
[1252, 292, 0 ],
[1253, 293, 0 ],
[1254, 294, 0 ],
[1255, 295, 0 ],
[1256, 296, 0 ],
[1257, 297, 0 ],
[1258, 298, 0 ],
[1259, 299, 0 ],
[1260, 300, 0 ],
[1261, 302, 0 ],
[1267, 311, 0 ],
[1274, 321, 0 ],
[1275, 322, 0 ],
[1276, 323, 0 ],
[1277, 324, 0 ],
[1278, 325, 0 ],
[1282, 329, 0 ],
[1283, 331, 0 ],
[1287, 338, 0 ],
[1288, 339, 0 ],
[1289, 340, 0 ],
[1290, 341, 0 ],
[1291, 342, 0 ],
[1292, 343, 0 ],
[1293, 344, 0 ],
[1294, 345, 0 ],
[1295, 346, 0 ],
[1300, 353, 0 ],
[1301, 354, 0 ],
[1302, 355, 0 ],
[1303, 356, 0 ],
[1306, 361, 0 ],
[1307, 362, 0 ],
[1308, 363, 0 ],
[1312, 367, 0 ],
[1317, 372, 0 ],
[1319, 374, 0 ],
[1323, 378, 0 ],
[1326, 384, 0 ],
[1327, 385, 0 ],
[1328, 386, 0 ],
[1331, 390, 0 ],
[1336, 395, 0 ],
[1337, 396, 0 ],
[1339, 398, 0 ],
[1340, 399, 0 ],
[1346, 407, 0 ],
[1348, 410, 0 ],
[1349, 411, 0 ],
[1356, 419, 0 ],
[1357, 420, 0 ],
[1359, 422, 0 ],
[1360, 423, 0 ],
[1361, 424, 0 ],
[1362, 425, 0 ],
[1363, 426, 0 ],
[1364, 427, 0 ],
[1365, 428, 0 ],
[1366, 429, 0 ],
[1372, 435, 0 ],
[1373, 436, 0 ],
[1374, 437, 0 ],
[1375, 438, 0 ],
[1376, 439, 0 ],
[1377, 440, 0 ],
[1378, 441, 0 ],
[1379, 442, 0 ],
[1380, 443, 0 ],
[1381, 445, 0 ],
[1382, 446, 0 ],
[1383, 447, 0 ],
[1384, 448, 0 ],
[1385, 449, 0 ],
[1386, 450, 0 ],
[1387, 451, 0 ],
[1388, 453, 0 ],
[1389, 454, 0 ],
[1390, 455, 0 ],
[1391, 456, 0 ],
[1392, 457, 0 ],
[1393, 458, 0 ],
[1394, 459, 0 ],
[1395, 460, 0 ],
[1396, 461, 0 ],
[1397, 462, 0 ],
[1398, 463, 0 ],
[1399, 464, 0 ],
[1400, 465, 0 ],
[1401, 466, 0 ],
[1402, 467, 0 ],
[1403, 468, 0 ],
[1404, 469, 0 ],
[1405, 470, 0 ],
[1406, 471, 0 ],
[1407, 472, 0 ],
[1408, 473, 0 ],
[1409, 474, 0 ],
[1410, 475, 0 ],
[1411, 476, 0 ],
[1412, 477, 0 ],
[1413, 478, 0 ],
[1414, 479, 0 ],
[1418, 483, 0 ],
[1419, 484, 0 ],
[1421, 486, 0 ],
[1422, 487, 0 ],
[1423, 488, 0 ],
[1424, 489, 0 ],
[1425, 490, 0 ],
[1426, 491, 0 ],
[1427, 492, 0 ],
[1428, 493, 0 ],
[1431, 496, 0 ],
[1432, 497, 0 ],
[1433, 498, 0 ],
[1434, 499, 0 ],
[1435, 500, 0 ],
[1436, 501, 0 ],
[1437, 502, 0 ],
[1438, 503, 0 ],
[1439, 504, 0 ],
[1440, 505, 0 ],
[1443, 508, 0 ],
[1446, 511, 0 ],
[1447, 512, 0 ],
[1448, 513, 0 ],
[1449, 514, 0 ],
[1450, 515, 0 ],
[1451, 516, 0 ],
[1452, 517, 0 ],
[1453, 518, 0 ],
[1454, 519, 0 ],
[1455, 520, 0 ],
[1456, 521, 0 ],
[1457, 522, 0 ],
[1458, 523, 0 ],
[1459, 524, 0 ],
[1460, 525, 0 ],
[1461, 526, 0 ],
[1462, 527, 0 ],
[1463, 528, 0 ],
[1464, 529, 0 ],
[1465, 530, 0 ],
[1466, 531, 0 ],
[1467, 532, 0 ],
[1468, 533, 0 ],
[1469, 534, 0 ],
[1470, 535, 0 ],
[1471, 536, 0 ],
[1472, 537, 0 ],
[1473, 538, 0 ],
[1474, 539, 0 ],
[1475, 540, 0 ],
[1476, 541, 0 ],
[1477, 542, 0 ],
[1482, 547, 0 ],
[1483, 548, 0 ],
[1484, 549, 0 ],
[1485, 550, 0 ],
[1486, 551, 0 ],
[1489, 555, 0 ],
[1490, 556, 0 ],
[1491, 557, 0 ],
[1492, 558, 0 ],
[1493, 559, 0 ],
[1494, 560, 0 ],
[1495, 561, 0 ],
[1500, 566, 0 ],
[1501, 567, 0 ],
[1503, 569, 0 ],
[1504, 570, 0 ],
[1512, 578, 0 ],
[1513, 579, 0 ],
[1518, 584, 0 ],
[1519, 585, 0 ],
[1, 490, 0 ],
[3, 4, 1 ],
[491, 6, 0 ],
[7, 5, 0 ],
[8, 9, 0 ],
[492, 11, 0 ],
[11, 493, 0 ],
[492, 493, 1 ],
[494, 14, 0 ],
[13, 15, 0 ],
[16, 5, 0 ],
[17, 18, 1 ],
[17, 12, 0 ],
[14, 495, 0 ],
[494, 19, 0 ],
[20, 21, 0 ],
[20, 22, 1 ],
[497, 23, 0 ],
[23, 499, 1 ],
[25, 26, 0 ],
[25, 22, 0 ],
[23, 27, 0 ],
[28, 23, 0 ],
[8, 21, 0 ],
[9, 29, 0 ],
[30, 25, 1 ],
[31, 32, 1 ],
[32, 33, 1 ],
[34, 35, 0 ],
[35, 36, 0 ],
[490, 6, 1 ],
[37, 10, 1 ],
[10, 38, 0 ],
[37, 38, 1 ],
[39, 40, 1 ],
[39, 41, 1 ],
[42, 41, 1 ],
[18, 42, 1 ],
[492, 43, 1 ],
[44, 45, 0 ],
[44, 505, 0 ],
[46, 12, 0 ],
[47, 48, 0 ],
[49, 50, 0 ],
[31, 33, 1 ],
[31, 51, 0 ],
[52, 53, 1 ],
[52, 54, 0 ],
[506, 55, 0 ],
[506, 507, 1 ],
[57, 506, 0 ],
[57, 58, 0 ],
[58, 506, 0 ],
[59, 60, 1 ],
[508, 62, 0 ],
[30, 61, 1 ],
[63, 506, 0 ],
[13, 64, 0 ],
[65, 66, 1 ],
[59, 67, 0 ],
[61, 67, 0 ],
[68, 69, 1 ],
[70, 69, 1 ],
[71, 72, 1 ],
[73, 74, 1 ],
[37, 75, 1 ],
[72, 75, 0 ],
[37, 72, 1 ],
[76, 77, 1 ],
[77, 51, 0 ],
[73, 72, 1 ],
[18, 40, 1 ],
[492, 45, 1 ],
[10, 74, 1 ],
[45, 511, 1 ],
[78, 32, 1 ],
[79, 80, 0 ],
[81, 79, 1 ],
[34, 82, 0 ],
[83, 84, 0 ],
[83, 499, 0 ],
[85, 86, 0 ],
[87, 86, 1 ],
[88, 89, 0 ],
[90, 86, 1 ],
[91, 86, 0 ],
[86, 92, 0 ],
[86, 93, 0 ],
[94, 86, 1 ],
[86, 95, 1 ],
[513, 517, 0 ],
[97, 66, 1 ],
[42, 98, 0 ],
[99, 100, 1 ],
[42, 101, 0 ],
[102, 42, 1 ],
[103, 87, 0 ],
[104, 103, 0 ],
[105, 87, 0 ],
[106, 107, 0 ],
[108, 107, 0 ],
[109, 106, 0 ],
[110, 111, 1 ],
[87, 112, 0 ],
[113, 87, 0 ],
[87, 85, 1 ],
[110, 114, 1 ],
[115, 116, 0 ],
[117, 118, 0 ],
[117, 119, 0 ],
[117, 120, 1 ],
[121, 122, 0 ],
[123, 124, 0 ],
[125, 126, 0 ],
[127, 119, 0 ],
[118, 128, 0 ],
[121, 119, 0 ],
[530, 527, 0 ],
[125, 130, 0 ],
[125, 123, 0 ],
[131, 132, 0 ],
[133, 123, 0 ],
[524, 134, 0 ],
[135, 136, 0 ],
[123, 131, 0 ],
[117, 128, 1 ],
[137, 521, 0 ],
[531, 514, 0 ],
[139, 521, 0 ],
[140, 514, 0 ],
[522, 141, 0 ],
[142, 523, 0 ],
[530, 526, 0 ],
[140, 532, 0 ],
[142, 144, 0 ],
[140, 522, 0 ],
[145, 146, 0 ],
[147, 523, 0 ],
[144, 523, 0 ],
[139, 523, 0 ],
[140, 141, 0 ],
[528, 526, 0 ],
[528, 148, 0 ],
[149, 150, 0 ],
[145, 528, 0 ],
[530, 151, 0 ],
[524, 152, 0 ],
[149, 525, 1 ],
[139, 514, 0 ],
[126, 120, 1 ],
[530, 153, 0 ],
[528, 147, 1 ],
[528, 154, 0 ],
[130, 120, 1 ],
[528, 155, 1 ],
[524, 533, 0 ],
[524, 149, 0 ],
[154, 150, 0 ],
[157, 110, 1 ],
[119, 158, 0 ],
[159, 60, 0 ],
[536, 161, 0 ],
[115, 151, 0 ],
[162, 134, 0 ],
[115, 526, 0 ],
[138, 87, 0 ],
[123, 163, 0 ],
[112, 164, 0 ],
[112, 165, 0 ],
[166, 165, 0 ],
[167, 537, 0 ],
[168, 104, 0 ],
[531, 520, 0 ],
[139, 520, 0 ],
[520, 169, 0 ],
[168, 105, 0 ],
[520, 170, 0 ],
[171, 89, 0 ],
[521, 172, 0 ],
[123, 173, 0 ],
[521, 174, 0 ],
[37, 39, 0 ],
[530, 175, 0 ],
[530, 176, 0 ],
[88, 530, 0 ],
[177, 496, 1 ],
[178, 525, 0 ],
[179, 493, 1 ],
[180, 181, 1 ],
[182, 180, 0 ],
[179, 181, 0 ],
[180, 493, 1 ],
[183, 30, 0 ],
[183, 21, 0 ],
[538, 185, 0 ],
[538, 89, 0 ],
[184, 186, 0 ],
[184, 187, 0 ],
[520, 172, 0 ],
[89, 175, 0 ],
[185, 89, 0 ],
[89, 188, 0 ],
[189, 190, 0 ],
[539, 172, 0 ],
[504, 192, 0 ],
[105, 186, 0 ],
[105, 187, 0 ],
[539, 193, 0 ],
[187, 194, 0 ],
[539, 540, 0 ],
[539, 196, 0 ],
[197, 540, 0 ],
[110, 198, 0 ],
[197, 539, 0 ],
[199, 537, 0 ],
[134, 526, 0 ],
[200, 193, 0 ],
[4, 201, 1 ],
[202, 86, 0 ],
[85, 203, 0 ],
[147, 204, 0 ],
[147, 205, 0 ],
[123, 206, 0 ],
[537, 207, 0 ],
[165, 208, 0 ],
[4, 94, 1 ],
[4, 2, 0 ],
[209, 4, 0 ],
[119, 163, 0 ],
[210, 3, 0 ],
[99, 211, 0 ],
[99, 69, 1 ],
[212, 99, 0 ],
[213, 214, 0 ],
[510, 215, 0 ],
[128, 69, 1 ],
[216, 69, 1 ],
[217, 98, 0 ],
[504, 218, 0 ],
[177, 504, 1 ],
[219, 209, 0 ],
[219, 220, 0 ],
[94, 95, 1 ],
[159, 221, 1 ],
[34, 161, 0 ],
[222, 221, 0 ],
[211, 52, 1 ],
[215, 223, 1 ],
[224, 215, 0 ],
[225, 224, 1 ],
[224, 223, 0 ],
[226, 6, 0 ],
[7, 3, 1 ],
[216, 227, 1 ],
[228, 229, 0 ],
[227, 230, 0 ],
[231, 53, 1 ],
[544, 545, 0 ],
[234, 235, 1 ],
[546, 214, 1 ],
[233, 227, 0 ],
[237, 238, 0 ],
[212, 100, 0 ],
[519, 239, 0 ],
[238, 519, 0 ],
[213, 240, 0 ],
[241, 242, 1 ],
[70, 241, 0 ],
[509, 213, 0 ],
[68, 243, 0 ],
[243, 244, 0 ],
[68, 244, 0 ],
[544, 547, 1 ],
[245, 227, 1 ],
[246, 208, 0 ],
[112, 208, 0 ],
[165, 247, 0 ],
[537, 549, 0 ],
[537, 550, 0 ],
[537, 551, 0 ],
[110, 251, 0 ],
[510, 252, 1 ],
[529, 253, 1 ],
[237, 239, 1 ],
[254, 238, 1 ],
[69, 255, 0 ],
[510, 225, 1 ],
[256, 257, 0 ],
[258, 190, 0 ],
[258, 259, 0 ],
[260, 261, 1 ],
[554, 553, 1 ],
[515, 263, 0 ],
[14, 264, 1 ],
[116, 555, 0 ],
[151, 116, 0 ],
[111, 114, 1 ],
[77, 111, 0 ],
[266, 525, 0 ],
[267, 120, 1 ],
[268, 269, 0 ],
[556, 271, 0 ],
[556, 272, 0 ],
[529, 273, 0 ],
[128, 274, 0 ],
[34, 275, 0 ],
[503, 276, 0 ],
[503, 504, 1 ],
[177, 218, 1 ],
[277, 278, 1 ],
[557, 558, 1 ],
[557, 559, 1 ],
[559, 558, 1 ],
[277, 78, 1 ],
[277, 279, 1 ],
[78, 279, 0 ],
[281, 282, 0 ],
[283, 161, 1 ],
[268, 161, 1 ],
[256, 284, 0 ],
[515, 516, 1 ],
[263, 516, 0 ],
[516, 285, 0 ],
[63, 286, 0 ],
[287, 516, 0 ],
[8, 102, 1 ],
[8, 101, 1 ],
[80, 288, 0 ],
[80, 289, 0 ],
[276, 560, 0 ],
[37, 290, 0 ],
[290, 74, 1 ],
[512, 291, 0 ],
[78, 292, 1 ],
[199, 548, 0 ],
[491, 293, 0 ],
[4, 294, 0 ],
[490, 541, 1 ],
[491, 295, 0 ],
[491, 296, 0 ],
[295, 297, 0 ],
[508, 161, 0 ],
[117, 123, 0 ],
[133, 117, 0 ],
[71, 74, 1 ],
[74, 278, 1 ],
[298, 515, 0 ],
[5, 299, 0 ],
[32, 292, 1 ],
[5, 29, 1 ],
[503, 560, 0 ],
[300, 301, 1 ],
[51, 300, 0 ],
[244, 302, 1 ],
[31, 302, 1 ],
[51, 282, 1 ],
[303, 304, 0 ],
[305, 304, 0 ],
[305, 259, 0 ],
[306, 307, 1 ],
[305, 308, 0 ],
[305, 309, 0 ],
[310, 309, 1 ],
[306, 309, 1 ],
[311, 280, 0 ],
[280, 278, 1 ],
[311, 32, 1 ],
[13, 312, 1 ],
[313, 314, 0 ],
[312, 313, 1 ],
[547, 566, 1 ],
[245, 315, 1 ],
[312, 316, 0 ],
[312, 314, 0 ],
[554, 546, 1 ],
[262, 216, 1 ],
[317, 233, 0 ],
[318, 317, 0 ],
[231, 52, 1 ],
[319, 567, 0 ],
[557, 321, 0 ],
[277, 65, 1 ],
[322, 288, 1 ],
[322, 323, 0 ],
[277, 324, 1 ],
[324, 325, 0 ],
[277, 325, 0 ],
[326, 327, 0 ],
[328, 326, 1 ],
[328, 327, 1 ],
[326, 329, 0 ],
[568, 329, 1 ],
[568, 326, 0 ],
[332, 78, 1 ],
[333, 306, 0 ],
[332, 333, 0 ],
[332, 334, 0 ],
[66, 334, 1 ],
[330, 335, 1 ],
[336, 66, 0 ],
[330, 336, 1 ],
[68, 70, 0 ],
[509, 337, 1 ],
[324, 288, 0 ],
[338, 559, 0 ],
[339, 559, 0 ],
[339, 340, 1 ],
[559, 340, 1 ],
[341, 292, 0 ],
[557, 342, 0 ],
[558, 343, 0 ],
[502, 340, 1 ],
[72, 32, 1 ],
[344, 345, 0 ],
[346, 47, 0 ],
[46, 47, 0 ],
[346, 345, 0 ],
[347, 328, 0 ],
[347, 348, 1 ],
[571, 348, 1 ],
[347, 572, 0 ],
[571, 570, 1 ],
[14, 350, 0 ],
[350, 573, 0 ],
[15, 351, 1 ],
[352, 15, 0 ],
[15, 335, 1 ],
[232, 227, 0 ],
[565, 544, 1 ],
[235, 567, 1 ],
[567, 286, 0 ],
[353, 519, 0 ],
[354, 353, 0 ],
[355, 354, 0 ],
[354, 356, 0 ],
[357, 358, 0 ],
[574, 359, 0 ],
[235, 575, 0 ],
[167, 361, 0 ],
[528, 362, 0 ],
[363, 344, 0 ],
[259, 364, 1 ],
[54, 56, 0 ],
[365, 364, 0 ],
[231, 366, 0 ],
[30, 367, 0 ],
[61, 367, 1 ],
[254, 368, 0 ],
[254, 369, 0 ],
[254, 370, 0 ],
[99, 358, 0 ],
[354, 519, 0 ],
[571, 371, 0 ],
[207, 372, 0 ],
[57, 373, 0 ],
[209, 374, 0 ],
[375, 376, 0 ],
[376, 377, 0 ],
[16, 49, 0 ],
[318, 377, 0 ],
[378, 297, 0 ],
[562, 379, 0 ],
[576, 563, 0 ],
[576, 381, 0 ],
[577, 576, 1 ],
[244, 383, 0 ],
[244, 306, 1 ],
[383, 306, 1 ],
[380, 306, 0 ],
[252, 225, 0 ],
[220, 76, 0 ],
[542, 384, 0 ],
[385, 384, 0 ],
[542, 385, 0 ],
[386, 385, 0 ],
[387, 578, 0 ],
[332, 388, 1 ],
[382, 332, 1 ],
[382, 388, 0 ],
[579, 578, 0 ],
[577, 387, 1 ],
[144, 390, 0 ],
[37, 49, 0 ],
[391, 233, 0 ],
[392, 310, 0 ],
[260, 393, 0 ],
[394, 230, 0 ],
[395, 282, 1 ],
[395, 244, 0 ],
[25, 396, 1 ],
[81, 74, 0 ],
[278, 80, 1 ],
[81, 278, 1 ],
[569, 570, 0 ],
[397, 552, 0 ],
[542, 398, 0 ],
[398, 385, 0 ],
[399, 499, 0 ],
[83, 399, 0 ],
[498, 400, 0 ],
[518, 239, 1 ],
[575, 543, 0 ],
[401, 360, 0 ],
[580, 581, 0 ],
[401, 402, 0 ],
[403, 231, 0 ],
[189, 360, 1 ],
[234, 404, 0 ],
[235, 404, 1 ],
[235, 580, 0 ],
[216, 259, 0 ],
[405, 259, 0 ],
[405, 318, 0 ],
[406, 230, 0 ],
[542, 407, 0 ],
[23, 408, 0 ],
[577, 348, 0 ],
[562, 564, 1 ],
[582, 507, 0 ],
[27, 410, 0 ],
[501, 27, 0 ],
[27, 411, 0 ],
[411, 410, 0 ],
[403, 360, 0 ],
[412, 360, 0 ],
[326, 413, 0 ],
[414, 413, 0 ],
[6, 297, 0 ],
[554, 580, 1 ],
[262, 401, 1 ],
[499, 556, 1 ],
[224, 229, 0 ],
[583, 507, 0 ],
[415, 307, 0 ],
[416, 507, 0 ],
[284, 561, 0 ],
[543, 417, 0 ],
[418, 506, 0 ],
[220, 157, 0 ],
[295, 419, 0 ],
[295, 420, 0 ],
[541, 62, 0 ],
[52, 421, 0 ],
[60, 160, 0 ],
[535, 161, 0 ],
[267, 282, 0 ],
[52, 365, 0 ],
[28, 27, 0 ],
[30, 201, 1 ],
[422, 81, 0 ],
[119, 425, 0 ],
[423, 425, 0 ],
[424, 425, 0 ],
[426, 428, 0 ],
[427, 428, 0 ],
[19, 428, 1 ],
[45, 429, 0 ],
[44, 429, 0 ],
[505, 429, 0 ],
[231, 431, 1 ],
[190, 431, 1 ],
[430, 431, 0 ],
[286, 433, 0 ],
[432, 433, 0 ],
[506, 433, 0 ],
[23, 434, 0 ],
[400, 434, 0 ],
[500, 434, 0 ],
[32, 436, 0 ],
[435, 436, 0 ],
[78, 436, 1 ],
[86, 438, 1 ],
[437, 438, 0 ],
[221, 438, 0 ],
[207, 439, 0 ],
[516, 439, 0 ],
[513, 439, 0 ],
[181, 441, 1 ],
[440, 441, 0 ],
[504, 441, 1 ],
[135, 442, 0 ],
[109, 442, 0 ],
[112, 442, 0 ],
[113, 443, 0 ],
[132, 443, 0 ],
[107, 443, 0 ],
[444, 445, 0 ],
[112, 445, 0 ],
[109, 445, 0 ],
[119, 447, 1 ],
[100, 447, 1 ],
[446, 447, 0 ],
[124, 448, 0 ],
[125, 448, 0 ],
[131, 448, 0 ],
[449, 450, 0 ],
[173, 450, 0 ],
[184, 450, 0 ],
[144, 451, 0 ],
[140, 451, 0 ],
[514, 451, 0 ],
[537, 585, 1 ],
[141, 585, 0 ],
[584, 585, 0 ],
[522, 454, 0 ],
[144, 454, 0 ],
[453, 454, 0 ],
[199, 456, 0 ],
[140, 456, 0 ],
[455, 456, 0 ],
[537, 456, 0 ],
[538, 457, 0 ],
[153, 457, 0 ],
[176, 457, 0 ],
[524, 459, 0 ],
[458, 459, 0 ],
[134, 459, 0 ],
[460, 461, 0 ],
[150, 461, 0 ],
[149, 461, 0 ],
[521, 463, 0 ],
[462, 463, 0 ],
[538, 463, 0 ],
[110, 464, 0 ],
[90, 464, 0 ],
[165, 464, 0 ],
[458, 465, 0 ],
[134, 465, 0 ],
[524, 465, 0 ],
[466, 467, 0 ],
[110, 467, 0 ],
[165, 467, 0 ],
[468, 469, 0 ],
[541, 469, 0 ],
[490, 469, 0 ],
[263, 471, 0 ],
[470, 471, 0 ],
[534, 471, 0 ],
[136, 472, 0 ],
[110, 472, 0 ],
[251, 472, 0 ],
[226, 474, 0 ],
[473, 474, 0 ],
[257, 474, 0 ],
[6, 474, 1 ],
[299, 475, 1 ],
[3, 475, 0 ],
[210, 475, 0 ],
[297, 476, 0 ],
[296, 476, 0 ],
[295, 476, 0 ],
[313, 478, 1 ],
[477, 478, 0 ],
[245, 478, 0 ],
[479, 481, 0 ],
[565, 481, 0 ],
[480, 481, 0 ],
[415, 482, 0 ],
[56, 482, 0 ],
[409, 482, 0 ],
[483, 484, 0 ],
[3, 484, 0 ],
[301, 484, 0 ],
[233, 485, 0 ],
[392, 485, 0 ],
[391, 485, 0 ],
[579, 488, 0 ],
[486, 488, 0 ],
[487, 488, 0 ],
[270, 489, 0 ],
[331, 489, 0 ],
[396, 489, 1 ],
[519, 253, 0 ],
[382, 349, 1 ],
[349, 351, 0 ],
[459, 465, 0 ],
[549, 550, 0 ],
[550, 551, 0 ],
[194, 195, 0 ],
[247, 248, 0 ],
[2, 294, 0 ],
[549, 551, 0 ],
[54, 365, 0 ],
[131, 265, 0 ],
[91, 92, 0 ],
[247, 249, 0 ],
[186, 191, 0 ],
[129, 173, 0 ],
[96, 202, 0 ],
[53, 320, 0 ],
[24, 396, 0 ],
[133, 156, 0 ],
[442, 452, 0 ],
[445, 452, 0 ],
[247, 250, 0 ],
[187, 195, 0 ],
[216, 236, 0 ],
[244, 389, 0 ],
[394, 406, 0 ],
[442, 445, 0 ],
[442, 444, 0 ],
[198, 472, 0 ],
[464, 467, 0 ],
[198, 251, 0 ],
[112, 143, 0 ],
[2, 490, 0 ],
[5, 491, 0 ],
[10, 492, 0 ],
[12, 493, 0 ],
[13, 494, 0 ],
[15, 495, 0 ],
[18, 496, 0 ],
[20, 497, 0 ],
[22, 498, 0 ],
[24, 499, 0 ],
[26, 500, 0 ],
[30, 501, 0 ],
[32, 502, 0 ],
[37, 503, 0 ],
[42, 504, 0 ],
[46, 505, 0 ],
[52, 506, 0 ],
[56, 507, 0 ],
[61, 508, 0 ],
[68, 509, 0 ],
[69, 510, 0 ],
[74, 511, 0 ],
[78, 512, 0 ],
[86, 513, 0 ],
[87, 514, 0 ],
[94, 515, 0 ],
[95, 516, 0 ],
[96, 517, 0 ],
[99, 518, 0 ],
[100, 519, 0 ],
[104, 520, 0 ],
[105, 521, 0 ],
[106, 522, 0 ],
[107, 523, 0 ],
[117, 524, 0 ],
[120, 525, 0 ],
[123, 526, 0 ],
[124, 527, 0 ],
[125, 528, 0 ],
[128, 529, 0 ],
[129, 530, 0 ],
[138, 531, 0 ],
[143, 532, 0 ],
[156, 533, 0 ],
[157, 534, 0 ],
[159, 535, 0 ],
[160, 536, 0 ],
[165, 537, 0 ],
[184, 538, 0 ],
[191, 539, 0 ],
[195, 540, 0 ],
[201, 541, 0 ],
[220, 542, 0 ],
[231, 543, 0 ],
[232, 544, 0 ],
[233, 545, 0 ],
[236, 546, 0 ],
[245, 547, 0 ],
[246, 548, 0 ],
[248, 549, 0 ],
[249, 550, 0 ],
[250, 551, 0 ],
[259, 552, 0 ],
[261, 553, 0 ],
[262, 554, 0 ],
[265, 555, 0 ],
[270, 556, 0 ],
[277, 557, 0 ],
[279, 558, 0 ],
[280, 559, 0 ],
[290, 560, 0 ],
[301, 561, 0 ],
[305, 562, 0 ],
[306, 563, 0 ],
[310, 564, 0 ],
[313, 565, 0 ],
[315, 566, 0 ],
[320, 567, 0 ],
[330, 568, 0 ],
[332, 569, 0 ],
[334, 570, 0 ],
[336, 571, 0 ],
[349, 572, 0 ],
[351, 573, 0 ],
[358, 574, 0 ],
[360, 575, 0 ],
[380, 576, 0 ],
[382, 577, 0 ],
[383, 578, 0 ],
[389, 579, 0 ],
[401, 580, 0 ],
[402, 581, 0 ],
[409, 582, 0 ],
[415, 583, 0 ],
[444, 584, 0 ],
[452, 585, 0 ]
])
ppc["parameters"] = {
"x_trans_sg": 0.003,
"x_trans_fm": 0.001,
"x_trans_fl": 0.001,
"d_l": 1e-3,
"d_l_perturb": 1e-5,
"w_1_ij": 1,
"w_2_ij": 1,
"w_3_ij": 1,
"w_4_ij": 1,
"b_r": 238,
"b_c": 248 }
return ppc
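
# A minimal, illustrative helper (not defined in the original case module) showing
# how the custom "parameters" block built above could be inspected once the case
# dict has been returned. It assumes only that ppc["parameters"] is a flat dict of
# scalar tuning values, as constructed in this file; everything else about the
# case structure is left unasserted.
def _describe_parameters(ppc):
    """Print every tuning parameter attached to the case dict."""
    for name, value in ppc["parameters"].items():
        print(f"{name} = {value}")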
1.1, 0.9], [301, 1, 0, 0, 0, 0, 0,\n 0.999155, 0, 380.0, 0, 1.1, 0.9], [302, 1, 237.502196, 47.500439, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [303, 1, 121.988016, 24.397603, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [304, 1, 104.751195, 20.950239, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [305, 1, 0, 0, 0, 0, 0, 0.999588, 0, 380.0,\n 0, 1.1, 0.9], [306, 1, 0, 0, 0, 0, 0, 1.001506, 0, 380.0, 0, 1.1, 0.9],\n [307, 1, 124.244651, 24.84893, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 308, 1, 153.177102, 30.63542, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 309, 1, 250.619279, 50.123856, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 310, 1, 0, 0, 0, 0, 0, 0.999979, 0, 380.0, 0, 1.1, 0.9], [311, 1, \n 212.878265, 42.575653, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [312, 1, \n 95.737279, 19.147456, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [313, 1, 0,\n 0, 0, 0, 0, 0.999814, 0, 380.0, 0, 1.1, 0.9], [314, 1, 296.533151, \n 59.30663, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [315, 1, 0, 0, 0, 0, 0,\n 1.00139, 0, 380.0, 0, 1.1, 0.9], [316, 1, 116.185543, 23.237109, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [317, 1, 156.439578, 31.287916, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [318, 1, 257.087979, 51.417596, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [319, 1, 9.209919, 1.841984, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [320, 1, 0, 0, 0, 0, 0, 0.999999, 0, 380.0, 0, \n 1.1, 0.9], [321, 1, 217.864189, 43.572838, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [322, 1, 27.735514, 5.547103, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [323, 1, 2.885644, 0.577129, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [324, 1, 510.112065, 102.022413, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [325, 1, 166.171204, 33.234241, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 326, 1, 13.472646, 2.694529, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [327,\n 1, 115.941313, 23.188263, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [328, 1,\n 197.581816, 39.516363, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [329, 1, \n 297.180666, 59.436133, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [330, 1, 0,\n 0, 0, 0, 0, 1.001493, 0, 380.0, 0, 1.1, 0.9], [331, 1, 23.595134, \n 4.719027, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [332, 1, 0, 0, 0, 0, 0,\n 0.997873, 0, 380.0, 0, 1.1, 0.9], [333, 1, 247.920323, 49.584065, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [334, 1, 0, 0, 0, 0, 0, 0.999337, 0, \n 380.0, 0, 1.1, 0.9], [335, 1, 253.021394, 50.604279, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [336, 1, 0, 0, 0, 0, 0, 0.997899, 0, 380.0, 0, 1.1,\n 0.9], [337, 1, 100.644491, 20.128898, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [338, 1, 273.163452, 54.63269, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [339, 1, 168.947817, 33.789563, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [340, 1, 142.841965, 28.568393, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [341, 1, 129.13199, 25.826398, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [342, 1, 224.001511, 44.800302, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [343, 1, 122.890495, 24.578099, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [344, 1, 308.115905, 61.623181, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [345, 1, 336.91261, 67.382522, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [346, 1, 334.468328, 66.893666, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [347, 1, 116.96938, 23.393876, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [348, 1, 305.765662, 61.153132, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [349, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [350, 1, \n 160.409129, 32.081826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [351, 1, 0,\n 0, 0, 0, 0, 0.999777, 0, 380.0, 0, 1.1, 0.9], [352, 1, 1061.795233, \n 
212.359047, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [353, 1, 3.192111, \n 0.638422, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [354, 1, 21.686927, \n 4.337385, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [355, 1, 0.0, 0.0, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [357, 1, 0.054362, 0.010872, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [358, 1, 0, 0, 0, 0, 0, 1.00115, 0, 380.0, 0, 1.1,\n 0.9], [359, 1, 3.174021, 0.634804, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [360, 1, 0, 0, 0, 0, 0, 1.000702, 0, 380.0, 0, 1.1, 0.9], [361, 1, \n 81.236209, 16.247242, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [362, 1, \n 231.565239, 46.313048, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [363, 1, \n 340.939078, 68.187816, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [364, 1, \n 80.439881, 16.087976, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [365, 1, \n 72.199065, 14.439813, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [366, 1, \n 143.098317, 28.619663, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [367, 1, \n 69.167782, 13.833556, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [368, 1, \n 34.059353, 6.811871, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [369, 1, \n 27.987713, 5.597543, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [370, 1, \n 82.396627, 16.479325, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [371, 1, \n 414.583549, 82.91671, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [372, 1, \n 240.422957, 48.084591, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [373, 1, \n 162.237585, 32.447517, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [374, 1, \n 83.192687, 16.638537, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [375, 1, \n 272.900898, 54.58018, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [376, 1, \n 299.320888, 59.864178, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [377, 1, \n 214.189404, 42.837881, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [378, 1, \n 213.777134, 42.755427, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [379, 1, \n 73.679821, 14.735964, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [380, 1, 0,\n 0, 0, 0, 0, 1.001413, 0, 380.0, 0, 1.1, 0.9], [381, 1, 246.389815, \n 49.277963, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [382, 1, 0, 0, 0, 0, 0,\n 0.999769, 0, 380.0, 0, 1.1, 0.9], [383, 1, 0, 0, 0, 0, 0, 0.999055, 0, \n 380.0, 0, 1.1, 0.9], [384, 1, 86.944845, 17.388969, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [385, 1, 109.741458, 21.948292, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [386, 1, 88.173972, 17.634794, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [387, 1, 179.569895, 35.913979, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [388, 1, 964.28771, 192.857542, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [389, 1, 0, 0, 0, 0, 0, 0.999912, 0, 380.0, 0, 1.1,\n 0.9], [390, 1, 79.618981, 15.923796, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [391, 1, 90.692809, 18.138562, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [392, 1, 174.038589, 34.807718, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [393, 1, 217.341636, 43.468327, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [394, 1, 78.171538, 15.634308, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [395, 1, 108.340922, 21.668184, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [396, 1, 76.736766, 15.347353, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [397, 1, 615.344336, 123.068867, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [398, 1, 266.518924, 53.303785, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [399, 1, 113.556473, 22.711295, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [400, 1, 60.500986, 12.100197, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [401, 1, 0, 0, 0, 0, 0, 1.000677, 0, 380.0, 0, 1.1, 0.9], [402, 1,\n 0, 0, 0, 0, 0, 1.000436, 0, 380.0, 0, 1.1, 0.9], [403, 1, 30.040147, 
\n 6.008029, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [404, 1, 105.833295, \n 21.166659, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [405, 1, 797.878416, \n 159.575683, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [406, 1, 60.453087, \n 12.090617, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [407, 1, 119.66821, \n 23.933642, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [408, 1, 346.013355, \n 69.202671, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [409, 1, 0, 0, 0, 0, 0,\n 0.999958, 0, 380.0, 0, 1.1, 0.9], [410, 1, 44.798315, 8.959663, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [411, 1, 42.358642, 8.471728, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [412, 1, 2.975231, 0.595046, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [413, 1, 148.528896, 29.705779, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [414, 1, 12.61171, 2.522342, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [415, 1, 0, 0, 0, 0, 0, 1.000312, 0, 380.0, 0, 1.1,\n 0.9], [416, 1, 179.604023, 35.920805, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [417, 1, 7.027563, 1.405513, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [418, 1, 146.450173, 29.290035, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 419, 1, 78.276577, 15.655315, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 420, 1, 78.808605, 15.761721, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 421, 1, 113.521787, 22.704357, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 422, 1, 83.169866, 16.633973, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 423, 1, 174.675097, 34.935019, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 424, 1, 12.593624, 2.518725, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [425,\n 1, 103.425433, 20.685087, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [426, 1,\n 8.569115, 1.713823, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [427, 1, \n 72.014986, 14.402997, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [428, 1, \n 32.289285, 6.457857, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [429, 1, \n 364.376918, 72.875384, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [430, 1, \n 194.091051, 38.81821, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [431, 1, \n 129.791667, 25.958333, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [432, 1, \n 151.718496, 30.343699, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [433, 1, \n 77.554451, 15.51089, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [434, 1, \n 40.363113, 8.072623, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [435, 1, \n 161.427044, 32.285409, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [436, 1, \n 86.18319, 17.236638, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [437, 1, \n 19.627317, 3.925463, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [438, 1, \n 52.674345, 10.534869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [439, 1, \n 98.072822, 19.614564, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [440, 1, \n 82.881556, 16.576311, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [441, 1, \n 63.539817, 12.707963, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [442, 1, \n 84.084687, 16.816937, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [443, 1, \n 182.303517, 36.460703, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [444, 1, 0,\n 0, 0, 0, 0, 0.999997, 0, 380.0, 0, 1.1, 0.9], [445, 1, 82.836611, \n 16.567322, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [446, 1, 38.410594, \n 7.682119, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [447, 1, 73.026044, \n 14.605209, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [448, 1, 53.666726, \n 10.733345, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [449, 1, 270.605805, \n 54.121161, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [450, 1, 165.59784, \n 33.119568, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [451, 1, 70.760773, \n 14.152155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [452, 1, 0, 0, 0, 0, 0,\n 0.999998, 0, 380.0, 0, 1.1, 0.9], [453, 1, 47.423447, 9.484689, 0, 
0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [454, 1, 33.085725, 6.617145, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [455, 1, 53.94349, 10.788698, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [456, 1, 53.94349, 10.788698, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [457, 1, 165.431157, 33.086231, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [458, 1, 157.345889, 31.469178, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [459, 1, 191.495801, 38.29916, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [460, 1, 251.664937, 50.332987, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [461, 1, 261.786107, 52.357221, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [462, 1, 80.081727, 16.016345, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [463, 1, 41.034378, 8.206876, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [464, 1, 41.083979, 8.216796, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [465, 1, 66.361856, 13.272371, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [466, 1, 53.877431, 10.775486, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [467, 1, 49.719948, 9.94399, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [468, 1, 81.521062, 16.304212, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [469, 1, 50.516969, 10.103394, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [470, 1, 128.647331, 25.729466, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [471, 1, 126.664898, 25.33298, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [472, 1, 44.303564, 8.860713, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [473, 1, 81.351906, 16.270381, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [474, 1, 42.017409, 8.403482, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [475, 1, 41.233718, 8.246744, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [476, 1, 46.600885, 9.320177, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [477, 1, 75.203749, 15.04075, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [478, 1, 94.469615, 18.893923, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [479, 1, 171.199924, 34.239985, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [480, 1, 75.040029, 15.008006, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [481, 1, 65.168234, 13.033647, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [482, 1, 73.995726, 14.799145, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [483, 1, 62.927942, 12.585588, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [484, 1, 49.332446, 9.866489, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [485, 1, 73.689618, 14.737924, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [486, 1, 677.908485, 135.581697, 0, 0, 0, 0.999529,\n 0, 220.0, 0, 1.1, 0.9], [487, 1, 171.778877, 34.355775, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [488, 1, 494.972714, 98.994543, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [489, 1, 130.27541, 26.055082, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [490, 1, 40.536849, 8.10737, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [491, 1, 55.738688, 11.147738, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [492, 1, 86.919491, 17.383898, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [493, 1, 112.02882, 22.405764, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [494, 1, 153.112663, 30.622533, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [495, 1, 120.527031, 24.105406, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [496, 1, 8.537131, 1.707426, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [497, 1, 1067.56553, 213.513106, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [498, 1, 50.067861, 10.013572, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [499, 1, 69.886531, 13.977306, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [500, 1, 38.262053, 7.652411, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [501, 1, 64.732797, 12.946559, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [502, 1, 255.486945, 51.097389, 0, 0, 0, 1.0, 0, \n 220.0, 0, 
1.1, 0.9], [503, 1, 78.245685, 15.649137, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [504, 1, 51.23895, 10.24779, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [505, 1, 363.426388, 72.685278, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [506, 1, 114.075071, 22.815014, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [507, 1, 108.509533, 21.701907, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [508, 1, 157.749113, 31.549823, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [509, 1, 207.882042, 41.576408, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [510, 1, 131.331505, 26.266301, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [511, 1, 114.561196, 22.912239, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [512, 1, 75.674743, 15.134949, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [513, 1, 41.688708, 8.337742, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [514, 1, 103.75916, 20.751832, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [515, 1, 92.559335, 18.511867, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [516, 1, 103.552116, 20.710423, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [517, 1, 48.640908, 9.728182, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [518, 1, 273.948672, 54.789734, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [519, 1, 26.961565, 5.392313, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [520, 1, 108.854273, 21.770855, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [521, 1, 98.332374, 19.666475, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [522, 1, 84.192975, 16.838595, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [523, 1, 45.320121, 9.064024, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [524, 1, 131.541254, 26.308251, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [525, 1, 156.710188, 31.342038, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [526, 1, 47.511581, 9.502316, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [527, 1, 52.164378, 10.432876, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [528, 1, 113.853632, 22.770726, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [529, 1, 145.943497, 29.188699, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [530, 1, 61.844893, 12.368979, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [531, 1, 62.879916, 12.575983, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [532, 1, 60.353758, 12.070752, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [533, 1, 54.08425, 10.81685, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [534, 1, 149.194629, 29.838926, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [535, 1, 186.7821, 37.35642, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [536, 1, 147.224547, 29.444909, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [537, 1, 48.97554, 9.795108, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [538, 1, 36.610772, 7.322154, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [539, 1, 38.84628, 7.769256, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [540, 1, 34.979369, 6.995874, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [541, 1, 90.354729, 18.070946, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [542, 1, 124.119469, 24.823894, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [543, 1, 67.793444, 13.558689, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [544, 1, 126.266241, 25.253248, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [545, 1, 271.871924, 54.374385, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [546, 1, 136.266314, 27.253263, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [547, 1, 176.133164, 35.226633, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [548, 1, 57.015034, 11.403007, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [549, 1, 48.752729, 9.750546, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [550, 1, 40.229292, 8.045858, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [551, 1, 38.780067, 7.756013, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [552, 1, 
192.577447, 38.515489, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [553, 1, 1.332338, 0.266468, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [554, 1, 195.101058, 39.020212, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [555, 1, 74.335663, 14.867133, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [556, 1, 114.999744, 22.999949, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [557, 1, 244.333084, 48.866617, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [558, 1, 144.073127, 28.814625, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [559, 1, 77.10655, 15.42131, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [560, 1, 120.458673, 24.091735, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [561, 1, 66.05602, 13.211204, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [562, 1, 180.460098, 36.09202, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [563, 1, 126.878156, 25.375631, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [564, 1, 250.521271, 50.104254, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [565, 1, 189.030713, 37.806143, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [566, 1, 0.303624, 0.060725, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [567, 1, 307.277901, 61.45558, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [568, 1, 284.157713, 56.831543, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [569, 1, 199.935363, 39.987073, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [570, 1, 312.135104, 62.427021, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [571, 1, 229.817616, 45.963523, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [572, 1, 405.359904, 81.071981, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [573, 1, 117.994953, 23.598991, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [574, 1, 224.825446, 44.965089, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [575, 1, 4.224873, 0.844975, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [576, 1, 273.386233, 54.677247, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [577, 1, 301.379822, 60.275964, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [578, 1, 287.747363, 57.549473, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [579, 1, 104.978091, 20.995618, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [580, 1, 21.854877, 4.370975, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [581, 1, 0.12558, 0.025116, 0, 0, 0, 1.0, 0, 220.0,\n 0, 1.1, 0.9], [582, 1, 79.071055, 15.814211, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [583, 1, 90.691594, 18.138319, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [584, 1, 52.034493, 10.406899, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [585, 1, 90.338282, 18.067656, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9]]'], {}), '([[586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [589, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [590, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [595, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [597, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [601, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [602, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [603, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [608, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [609, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [610, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [613, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [614, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [616, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 
0.9\n ], [617, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [618, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [619, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [621, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [624, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [628, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [632, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [637, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [639, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [640, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [641, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [646, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [647, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [650, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [655, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [657, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [662, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [663, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [666, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [668, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [670, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [672, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [676, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [677, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [678, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [679, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [683, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [687, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [689, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [691, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [694, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [695, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [697, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [698, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [699, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [700, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [702, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [704, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [708, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [711, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [716, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [717, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [721, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [722, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [723, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [725, 2, 
0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [726, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [727, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [728, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [730, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [731, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [732, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [736, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [737, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [741, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [742, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [746, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [747, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [750, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [758, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [760, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [761, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [762, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [763, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [765, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [767, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [769, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [771, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [774, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [775, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [776, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [779, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [781, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [786, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [787, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [788, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [791, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [792, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [793, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [794, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [795, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [796, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [800, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [801, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [802, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [808, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [809, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [816, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [817, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [818, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [821, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0, 1.1, 0.9], [822, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [825, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [833, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [834, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [835, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [837, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [839, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [843, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [844, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [848, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [849, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [850, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [853, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [855, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [856, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [857, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [858, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [859, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [860, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [862, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [863, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [864, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [865, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [866, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [867, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [869, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [870, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [872, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [873, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [874, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [876, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [877, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [881, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [883, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [885, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [886, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [888, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [889, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [890, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [895, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [897, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [898, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [899, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [902, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [903, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [905, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [906, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [907, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [909, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [913, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [915, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 
1.1, 0.9], [918, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [920, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [923, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [924, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [928, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [931, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [934, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [936, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [937, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [939, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [944, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [945, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [952, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [956, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [957, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [959, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [960, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [966, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [967, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [968, 2, 0, 0, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1,\n 0.9], [969, 2, 0, 0, 0, 0, 0, 0.999529, 0, 220.0, 0, 1.1, 0.9], [971, 2,\n 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [972, 2, 0, 0, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [975, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [976, 2, 0, \n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [977, 2, 0, 0, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [982, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [983, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [986, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [987, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [988, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [990, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [993, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [994, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [995, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [997, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [998, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [999, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1002, 2, 0, 0,\n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1003, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1010, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1011, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1018, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 
0.9], [1019, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1023, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1027, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1028, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1031, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1032, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1035, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1036, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1039, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1041, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1046, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1047, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1050, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1051, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1054, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1055, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1058, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1059, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1062, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1063, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1066, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1067, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1074, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1077, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1080, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1081, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1082, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1083, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1084, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1085, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1086, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1088, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1089, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1090, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1092, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1093, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1094, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1096, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9], [1097, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1098, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1102, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1103, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1104, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1106, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1107, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1108, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1110, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1111, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1112, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1114, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1115, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1116, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1118, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1119, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1120, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1122, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1123, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1124, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1126, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1127, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1128, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1130, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1131, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1132, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1134, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1135, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1136, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1137, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1138, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1139, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1140, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1142, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1143, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1144, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1146, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1147, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1148, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1150, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1151, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1152, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1154, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1155, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1156, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1158, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1159, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1160, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1161, 2, 0, 0, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1162, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1163, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1165, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1166, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1167, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1170, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1173, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1174, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1176, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1177, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1178, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1180, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1181, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1182, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1184, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1185, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1186, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1188, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1189, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1190, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1196, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1197, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1198, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1200, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1203, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1212, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1213, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1214, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1216, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1217, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1218, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1220, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1221, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1222, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1226, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1228, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1230, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1231, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1232, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1236, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1237, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1238, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1240, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1241, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1242, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], 
[extract_api fragment: two 'numpy.array' calls containing large literal numeric matrices (rows with values such as 220.0, 380.0, 1.1, 0.9 and 9999/-9999 bounds), recorded at character offsets (120, 79219) and (94108, 158043), together with the import 'from numpy import array'.]
9999, -9999, 1.0,\n 100, 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [993, 392.0, 0, \n 9999, -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [994, 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [997, 18.8, 0, 9999, -9999, 1.0,\n 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [998, 423.0, 0, \n 9999, -9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [999, 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, \n 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1003, 900.0, 0, 9999, -\n 9999, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1007,\n 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1010, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1011, 18.7, 0, 9999, -9999, 1.0,\n 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1012, 2822.8416, \n 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1018, 175.9, 0, 9999, -9999, 1.0, 100, 1, 175.9, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [1019, 120.0, 0, 9999, -9999, 1.0, 100, 1, 120.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1023, 0.2, 0, 9999, -9999, 1.0,\n 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1025, 113.6, 0, \n 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, 655.6, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1027, 42.143139, 0, 9999, -9999, 1.0, 100, 1, 48.3, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1028, 400.0, 0, 9999, -9999, \n 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1029, 60.0,\n 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1030, 547.524827, 0, 9999, -9999, 1.0, 100, 1, 1018.0, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1031, 1447.199962, 0, 9999, -9999, 1.0, 100, \n 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1032, 37.130761, 0, \n 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1033, 7.986228, 0, 9999, -9999, 1.0, 100, 1, 50.164506, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1034, 26.393282, 0, 9999, -9999, 1.0, 100,\n 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1035, 10.23952, 0,\n 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1036, 12.383139, 0, 9999, -9999, 1.0, 100, 1, 67.223077, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1037, 37.034771, 0, 9999, -9999, 1.0, 100,\n 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1038, 32.273941, \n 0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1039, 16.259191, 0, 9999, -9999, 1.0, 100, 1, 132.724114, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1041, 23.220031, 0, 9999, -9999, 1.0,\n 100, 1, 204.187624, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1042, \n 5.897804, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1044, 17.843471, 0, 9999, -9999, 1.0, 100, 1, \n 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1046, 79.936922, 0, \n 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0,\n 0], [1047, 7.020261, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1048, 40.016896, 0, 9999, -9999, 1.0, 100,\n 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1049, 75.832425, \n 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1050, 2.574285, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1051, 7.252584, 0, 9999, -9999, 1.0, \n 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1052, \n 15.175791, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1053, 12.346923, 0, 9999, -9999, 1.0, 100, 1, \n 16.368087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1054, 216.322419, 0,\n 9999, -9999, 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1055, 0.30681, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1056, 53.206333, 0, 9999, -9999, 1.0, 100, 1,\n 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1057, 100.378169, 0,\n 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1058, 117.210047, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1059, 43.35606, 0, 9999, -9999, 1.0, \n 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1060, \n 0.48885, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1061, 9.89646, 0, 9999, -9999, 1.0, 100, 1, 161.862597,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1062, 0.28386, 0, 9999, -9999, \n 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1063, \n 0.835661, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1064, 19.595506, 0, 9999, -9999, 1.0, 100, 1, \n 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1065, 38.415656, 0,\n 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1066, 11.779239, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1067, 0.006954, 0, 9999, -9999, 1.0, \n 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1072, \n 53.011687, 0, 9999, -9999, 1.0, 100, 1, 112.606433, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1073, 54.828335, 0, 9999, -9999, 1.0, 100, 1, \n 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1074, 80.822515, 0, \n 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1077, 0.830976, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1079, 37.497464, 0, 9999, -9999, 1.0, 100,\n 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1080, 0.003424, 0,\n 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1081, 39.02582, 0, 9999, -9999, 1.0, 100, 1, 405.642115, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1082, 30.101628, 0, 9999, -9999, 1.0, 100,\n 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1083, 40.966551,\n 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1084, 34.330508, 0, 9999, -9999, 1.0, 100, 1, 602.719371, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1085, 17.091912, 0, 9999, -9999, 1.0,\n 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1086, \n 32.887191, 0, 9999, -9999, 1.0, 100, 1, 225.59917, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1087, 7.499037, 0, 9999, -9999, 1.0, 100, 1, \n 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1088, 3.179915, 0, \n 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1089, 21.693926, 0, 9999, -9999, 
1.0, 100, 1, 384.449592, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1090, 68.505794, 0, 9999, -9999, 1.0, \n 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1091, \n 30.312336, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1092, 44.920748, 0, 9999, -9999, 1.0, 100, 1, \n 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1093, 65.837792, 0, \n 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1094, 0.941636, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1095, 0.050289, 0, 9999, -9999, 1.0, 100, \n 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1096, 29.200726, 0,\n 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1097, 2.055263, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1098, 34.647847, 0, 9999, -9999, 1.0, 100,\n 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1099, 166.043557,\n 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1101, 35.196912, 0, 9999, -9999, 1.0, 100, 1, 83.930665, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1102, 85.484282, 0, 9999, -9999, 1.0, \n 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1103, \n 88.696046, 0, 9999, -9999, 1.0, 100, 1, 245.381701, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1104, 0.046739, 0, 9999, -9999, 1.0, 100, 1, \n 0.206918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1105, 0.763545, 0, \n 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1106, 0.538044, 0, 9999, -9999, 1.0, 100, 1, 2.289793, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1107, 9.315632, 0, 9999, -9999, 1.0, 100, \n 1, 76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1108, 46.051867, \n 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1109, 0.188979, 0, 9999, -9999, 1.0, 100, 1, 0.77821, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1110, 0.495025, 0, 9999, -9999, 1.0, 100, \n 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1111, 9.399663, 0,\n 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1112, 17.835069, 0, 9999, -9999, 1.0, 100, 1, 69.53429, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1113, 0.897166, 0, 9999, -9999, 1.0, 100, \n 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1114, 1.424338, 0,\n 9999, -9999, 1.0, 100, 1, 13.446889, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1115, 14.79839, 0, 9999, -9999, 1.0, 100, 1, 50.575278, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1116, 8.410982, 0, 9999, -9999, 1.0, 100, \n 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1117, 22.537982, \n 0, 9999, -9999, 1.0, 100, 1, 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1118, 1.619728, 0, 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1119, 10.776742, 0, 9999, -9999, 1.0, \n 100, 1, 43.254023, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1120, \n 0.54849, 0, 9999, -9999, 1.0, 100, 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1121, 0.128871, 0, 9999, -9999, 1.0, 100, 1, 0.540589,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1122, 0.355104, 0, 9999, -9999,\n 1.0, 100, 1, 1.462883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1123, \n 0.279445, 0, 9999, -9999, 1.0, 100, 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1124, 0.319102, 0, 9999, -9999, 1.0, 100, 1, 1.288283,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1125, 4.97339, 0, 9999, -9999, \n 1.0, 100, 1, 25.818899, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1126, \n 
5.785037, 0, 9999, -9999, 1.0, 100, 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1127, 36.406066, 0, 9999, -9999, 1.0, 100, 1, \n 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1128, 0.968241, 0, \n 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1129, 1.446507, 0, 9999, -9999, 1.0, 100, 1, 4.738747, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1130, 0.288868, 0, 9999, -9999, 1.0, 100, 1, \n 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1131, 0.882575, 0, \n 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1132, 0.099503, 0, 9999, -9999, 1.0, 100, 1, 0.359497, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1133, 0.171545, 0, 9999, -9999, 1.0, 100, \n 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1134, 0.12121, 0, \n 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1135, 1.390811, 0, 9999, -9999, 1.0, 100, 1, 8.117819, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1136, 0.092905, 0, 9999, -9999, 1.0, 100, \n 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1137, 0.470801, 0, \n 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1138, 0.251948, 0, 9999, -9999, 1.0, 100, 1, 1.254278, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1139, 5.083469, 0, 9999, -9999, 1.0, 100, \n 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1140, 4.060139, 0,\n 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1141, 31.800974, 0, 9999, -9999, 1.0, 100, 1, 119.46456, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1142, 0.257109, 0, 9999, -9999, 1.0, 100, \n 1, 1.215733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1143, 3.996627, 0,\n 9999, -9999, 1.0, 100, 1, 25.239356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1144, 15.96963, 0, 9999, -9999, 1.0, 100, 1, 52.527382, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1145, 124.051607, 0, 9999, -9999, 1.0, 100,\n 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1146, 0.20533, 0,\n 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1147, 12.694751, 0, 9999, -9999, 1.0, 100, 1, 45.703707, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1148, 6.218355, 0, 9999, -9999, 1.0, 100, \n 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1149, 1.909193, 0,\n 9999, -9999, 1.0, 100, 1, 8.556784, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1150, 0.742648, 0, 9999, -9999, 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1151, 4.837401, 0, 9999, -9999, 1.0, 100, 1, \n 13.036113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1152, 0.038213, 0, \n 9999, -9999, 1.0, 100, 1, 0.116518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1153, 0.013427, 0, 9999, -9999, 1.0, 100, 1, 0.068788, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1154, 0.031353, 0, 9999, -9999, 1.0, 100, \n 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1155, 0.221052, 0,\n 9999, -9999, 1.0, 100, 1, 0.609451, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1156, 4.092172, 0, 9999, -9999, 1.0, 100, 1, 16.022334, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1157, 1.612252, 0, 9999, -9999, 1.0, 100, \n 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1158, 0.260133, 0,\n 9999, -9999, 1.0, 100, 1, 1.04304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1159, 3.421268, 0, 9999, -9999, 1.0, 100, 1, 13.498087, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1160, 92.70123, 0, 9999, -9999, 1.0, 100, \n 1, 238.377761, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1161, 1.447396, \n 0, 9999, -9999, 1.0, 100, 1, 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 
0], [1162, 66.108496, 0, 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1163, 39.337378, 0, 9999, -9999, 1.0,\n 100, 1, 330.03194, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1164, \n 48.660165, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1165, 6.897139, 0, 9999, -9999, 1.0, 100, 1, \n 57.188579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1166, 39.149107, 0, \n 9999, -9999, 1.0, 100, 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1167, 1.266099, 0, 9999, -9999, 1.0, 100, 1, 5.05378, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1168, 0.406717, 0, 9999, -9999, 1.0, 100, 1, \n 1.345774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1169, 0.977901, 0, \n 9999, -9999, 1.0, 100, 1, 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1170, 0.065614, 0, 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1173, 54.517441, 0, 9999, -9999, 1.0, 100, 1,\n 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1174, 0.315409, 0, \n 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1175, 0.295585, 0, 9999, -9999, 1.0, 100, 1, 0.855454, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1176, 0.083542, 0, 9999, -9999, 1.0, 100, \n 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1177, 10.259509, 0,\n 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1178, 0.601094, 0, 9999, -9999, 1.0, 100, 1, 3.167999, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1179, 0.298531, 0, 9999, -9999, 1.0, 100, \n 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1180, 0.17058, 0, \n 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1181, 40.321989, 0, 9999, -9999, 1.0, 100, 1, 85.739557, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1182, 51.507092, 0, 9999, -9999, 1.0, 100,\n 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1183, 5.009306, 0,\n 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1184, 0.734019, 0, 9999, -9999, 1.0, 100, 1, 4.219005, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1185, 2.912064, 0, 9999, -9999, 1.0, 100, \n 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1186, 11.368637, \n 0, 9999, -9999, 1.0, 100, 1, 38.916368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1187, 2.579245, 0, 9999, -9999, 1.0, 100, 1, 9.814574, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1188, 63.736435, 0, 9999, -9999, 1.0, \n 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1189, \n 3.253623, 0, 9999, -9999, 1.0, 100, 1, 20.261805, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1190, 0.006209, 0, 9999, -9999, 1.0, 100, 1, \n 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1191, 0.0187, 0, \n 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1196, 91.192992, 0, 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1197, 45.154128, 0, 9999, -9999, 1.0, \n 100, 1, 90.592266, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1198, \n 10.781322, 0, 9999, -9999, 1.0, 100, 1, 39.819157, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1199, 129.096066, 0, 9999, -9999, 1.0, 100, 1, \n 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1200, 40.048819, 0,\n 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1203, 0.000518, 0, 9999, -9999, 1.0, 100, 1, 182.623256, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1204, 6.696653, 0, 9999, -9999, 1.0, 100, \n 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1211, 0.002325, 0,\n 9999, -9999, 1.0, 100, 1, 18.005229, 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1212, 8.9e-05, 0, 9999, -9999, 1.0, 100, 1, 91.171888, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1213, 0.973196, 0, 9999, -9999, 1.0, 100, \n 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1214, 3.4e-05, 0,\n 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1215, 0.054275, 0, 9999, -9999, 1.0, 100, 1, 2.252965, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1216, 1.253411, 0, 9999, -9999, 1.0, 100, \n 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1217, 0.149386, 0,\n 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1218, 0.003217, 0, 9999, -9999, 1.0, 100, 1, 0.980482, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1219, 2.479626, 0, 9999, -9999, 1.0, 100, \n 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1220, 3.839093, 0,\n 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1221, 2.695126, 0, 9999, -9999, 1.0, 100, 1, 593.230436, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1222, 7.161228, 0, 9999, -9999, 1.0, 100, \n 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1225, 1.255556, \n 0, 9999, -9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1226, 0.28775, 0, 9999, -9999, 1.0, 100, 1, 3.982858, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1228, 0.001483, 0, 9999, -9999, 1.0, 100, \n 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1229, 18.31617, 0,\n 9999, -9999, 1.0, 100, 1, 51.244222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1230, 0.000325, 0, 9999, -9999, 1.0, 100, 1, 1.681276, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1231, 1.923913, 0, 9999, -9999, 1.0, 100, \n 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1232, 4.63817, 0, \n 9999, -9999, 1.0, 100, 1, 75.075088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1233, 500.196383, 0, 9999, -9999, 1.0, 100, 1, 575.36828, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1235, 8.984787, 0, 9999, -9999, 1.0, \n 100, 1, 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1236, \n 81.082668, 0, 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1237, 1.527562, 0, 9999, -9999, 1.0, 100, 1, \n 14.605409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1238, 4.660084, 0, \n 9999, -9999, 1.0, 100, 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1239, 1.665042, 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1240, 37.421408, 0, 9999, -9999, 1.0, 100,\n 1, 339.51051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1241, 18.885145, \n 0, 9999, -9999, 1.0, 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1242, 1.763094, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1243, 6.672629, 0, 9999, -9999, 1.0, \n 100, 1, 83.079842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1244, \n 170.415315, 0, 9999, -9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1245, 0.893274, 0, 9999, -9999, 1.0, 100, 1, \n 8.080896, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1246, 29.939855, 0, \n 9999, -9999, 1.0, 100, 1, 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1247, 0.002877, 0, 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1248, 35.172508, 0, 9999, -9999, 1.0, 100,\n 1, 91.958275, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1249, 6.814281, 0,\n 9999, -9999, 1.0, 100, 1, 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1250, 2.321024, 0, 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1251, 0.763016, 0, 9999, -9999, 
1.0, 100, \n 1, 23.404345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1252, 0.866633, 0,\n 9999, -9999, 1.0, 100, 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1253, 10.241594, 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1254, 27.7169, 0, 9999, -9999, 1.0, 100, 1,\n 82.278695, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1255, 0.662735, 0, \n 9999, -9999, 1.0, 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1256, 2.741448, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1257, 16.932573, 0, 9999, -9999, 1.0, 100,\n 1, 88.95288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1258, 98.683252, 0,\n 9999, -9999, 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1259, 20.411221, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1260, 2.232628, 0, 9999, -9999, 1.0, \n 100, 1, 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1261, \n 1.62711, 0, 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1267, 1.762778, 0, 9999, -9999, 1.0, 100, 1, 39.469006,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1274, 2.637835, 0, 9999, -9999,\n 1.0, 100, 1, 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1275, \n 9.287573, 0, 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1276, 1.874038, 0, 9999, -9999, 1.0, 100, 1, 25.655641,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1277, 5.207053, 0, 9999, -9999,\n 1.0, 100, 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1278, \n 14.540024, 0, 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1282, 0.000138, 0, 9999, -9999, 1.0, 100, 1, \n 4.363037, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1283, 1273.446566, 0,\n 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1287, 5.392165, 0, 9999, -9999, 1.0, 100, 1, 93.199628, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1288, 6.771115, 0, 9999, -9999, 1.0, \n 100, 1, 148.402692, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1289, \n 2.462386, 0, 9999, -9999, 1.0, 100, 1, 184.149235, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1290, 0.002137, 0, 9999, -9999, 1.0, 100, 1, \n 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1291, 8.652076, 0, \n 9999, -9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1292, 0.483448, 0, 9999, -9999, 1.0, 100, 1, 41.682074, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1293, 0.20117, 0, 9999, -9999, 1.0, 100, 1,\n 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1294, 0.49885, 0, \n 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1295, 0.4326, 0, 9999, -9999, 1.0, 100, 1, 5.873666, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1300, 1.338514, 0, 9999, -9999, 1.0, 100, 1, \n 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1301, 3.056681, 0, \n 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1302, 0.036072, 0, 9999, -9999, 1.0, 100, 1, 4.877299, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1303, 0.000754, 0, 9999, -9999, 1.0, 100, \n 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1306, 0.392214, 0,\n 9999, -9999, 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1307, 0.070954, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1308, 0.276331, 0, 9999, -9999, 1.0, 100, 1, \n 3.278321, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1312, 195.684185, 0, \n 9999, -9999, 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1317, 
7.32081, 0, 9999, -9999, 1.0, 100, 1, 23.958574, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1319, 4.498958, 0, 9999, -9999, 1.0, 100, \n 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1323, 56.14053, 0,\n 9999, -9999, 1.0, 100, 1, 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1326, 10.182637, 0, 9999, -9999, 1.0, 100, 1, 56.928865, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1327, 10.455752, 0, 9999, -9999, 1.0, 100,\n 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1328, 4.010462, 0,\n 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1331, 0.080057, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1336, 1.009232, 0, 9999, -9999, 1.0, 100, \n 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1337, 91.485454, \n 0, 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1339, 2.005785, 0, 9999, -9999, 1.0, 100, 1, 10.086482, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1340, 58.628541, 0, 9999, -9999, 1.0, \n 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1346, \n 32.686762, 0, 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1348, 12.081109, 0, 9999, -9999, 1.0, 100, 1, \n 22.707927, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1349, 28.392849, 0, \n 9999, -9999, 1.0, 100, 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1356, 9.77708, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1357, 7.611677, 0, 9999, -9999, 1.0, 100, \n 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1359, 4.046937, 0,\n 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1360, 4.206305, 0, 9999, -9999, 1.0, 100, 1, 17.135983, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1361, 14.984474, 0, 9999, -9999, 1.0, 100,\n 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1362, 19.170113, \n 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1363, 0.001093, 0, 9999, -9999, 1.0, 100, 1, 0.036158, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1364, 0.001363, 0, 9999, -9999, 1.0, \n 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1365, 2.2e-05,\n 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1366, 0.102023, 0, 9999, -9999, 1.0, 100, 1, 1.229992, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1372, 7.992948, 0, 9999, -9999, 1.0, \n 100, 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1373, \n 1.389051, 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1374, 71.380724, 0, 9999, -9999, 1.0, 100, 1, \n 108.220146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1375, 37.822803, 0,\n 9999, -9999, 1.0, 100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1376, 69.720118, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1377, 58.199323, 0, 9999, -9999, 1.0, \n 100, 1, 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1378, \n 45.859926, 0, 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1379, 0.14096, 0, 9999, -9999, 1.0, 100, 1, \n 0.805984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1380, 0.335839, 0, \n 9999, -9999, 1.0, 100, 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1381, 0.184209, 0, 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1382, 13.813542, 0, 9999, -9999, 1.0, 100, 1,\n 138.839906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1383, 21.399019, 0,\n 9999, -9999, 1.0, 100, 1, 109.821439, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1384, 1.198594, 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1385, 0.024343, 0, 9999, -9999, 1.0, 100, \n 1, 0.124455, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1386, 0.167167, 0,\n 9999, -9999, 1.0, 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1387, 1.066413, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1388, 0.256908, 0, 9999, -9999, 1.0, 100, \n 1, 0.928188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1389, 0.059104, 0,\n 9999, -9999, 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1390, 1.139446, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1391, 0.133841, 0, 9999, -9999, 1.0, 100, \n 1, 0.521719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1392, 4.971503, 0,\n 9999, -9999, 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1393, 0.252203, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1394, 0.208445, 0, 9999, -9999, 1.0, 100, \n 1, 1.077886, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1395, 0.013642, 0,\n 9999, -9999, 1.0, 100, 1, 0.073776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1396, 0.00417, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1397, 8.076645, 0, 9999, -9999, 1.0, 100, 1, \n 25.084545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1398, 0.940882, 0, \n 9999, -9999, 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1399, 3.713776, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1400, 0.236741, 0, 9999, -9999, 1.0, 100, \n 1, 1.297197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1401, 18.486316, 0,\n 9999, -9999, 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1402, 6.374645, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1403, 46.195768, 0, 9999, -9999, 1.0, 100,\n 1, 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1404, 54.270904,\n 0, 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1405, 6.605109, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1406, 2.938927, 0, 9999, -9999, 1.0, \n 100, 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1407, \n 0.025825, 0, 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1408, 5.740956, 0, 9999, -9999, 1.0, 100, 1, 41.078698,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1409, 1.5687, 0, 9999, -9999, \n 1.0, 100, 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1410, \n 5.314918, 0, 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1411, 6.47493, 0, 9999, -9999, 1.0, 100, 1, 39.395367,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1412, 0.032719, 0, 9999, -9999,\n 1.0, 100, 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1413, \n 0.023058, 0, 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1414, 8.5e-05, 0, 9999, -9999, 1.0, 100, 1, 25.992489,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1418, 13.628823, 0, 9999, -9999,\n 1.0, 100, 1, 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1419, \n 4.359201, 0, 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1421, 0.026178, 0, 9999, -9999, 0.999529, 100, 1, \n 6.972369, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1422, 0.02267, 0, \n 9999, -9999, 1.0, 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1423, 0.003041, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 
0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1424, 205.502248, 0, 9999, -9999, 1.0, 100,\n 1, 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1425, 7.089408, \n 0, 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1426, 12.895597, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1427, 51.532387, 0, 9999, -9999, 1.0, \n 100, 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1428, \n 32.967042, 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1431, 78.980532, 0, 9999, -9999, 1.0, 100, 1, \n 227.662022, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1432, 6.308304, 0, \n 9999, -9999, 1.0, 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1433, 1093.225383, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1434, 78.196858, 0, 9999, -9999, 1.0,\n 100, 1, 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1435, \n 62.720229, 0, 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1436, 53.522086, 0, 9999, -9999, 1.0, 100, 1, \n 98.434116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1437, 5.612357, 0, \n 9999, -9999, 1.0, 100, 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1438, 41.072054, 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1439, 31.289263, 0, 9999, -9999, 1.0, \n 100, 1, 99.103164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1440, \n 0.082585, 0, 9999, -9999, 1.0, 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1443, 58.478089, 0, 9999, -9999, 1.0, 100, 1, \n 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1446, 40.070237, 0,\n 9999, -9999, 1.0, 100, 1, 758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1447, 0.499762, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1448, 3.354244, 0, 9999, -9999, 1.0, 100, \n 1, 7.523578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1449, 36.4709, 0, \n 9999, -9999, 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1450, 21.193068, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1451, 27.993599, 0, 9999, -9999, 1.0, 100,\n 1, 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1452, 10.328116, \n 0, 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1453, 9.5e-05, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1454, 5.891023, 0, 9999, -9999, 1.0, 100, \n 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1455, 0.237412, \n 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1456, 19.998371, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1457, 0.55431, 0, 9999, -9999, 1.0, 100,\n 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1458, 0.068144, 0,\n 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1459, 0.968807, 0, 9999, -9999, 1.0, 100, 1, 5.309059, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1460, 5.997406, 0, 9999, -9999, 1.0, 100, \n 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1461, 4.61741, 0,\n 9999, -9999, 1.0, 100, 1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1462, 0.616784, 0, 9999, -9999, 1.0, 100, 1, 2.402686, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1463, 0.168278, 0, 9999, -9999, 1.0, 100, \n 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1464, 8.306476, 0,\n 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1465, 1.336085, 
0, 9999, -9999, 1.0, 100, 1, 5.299939, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1466, 2.109579, 0, 9999, -9999, 1.0, 100, \n 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1467, 0.543693, 0,\n 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1468, 6.402705, 0, 9999, -9999, 1.0, 100, 1, 23.789171, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1469, 9.990425, 0, 9999, -9999, 1.0, 100, \n 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1470, 49.691631, \n 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1471, 113.337953, 0, 9999, -9999, 1.0, 100, 1, 159.165074, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1472, 2.427411, 0, 9999, -9999, 1.0,\n 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1473, \n 2.100139, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1474, 0.510586, 0, 9999, -9999, 1.0, 100, 1, 1.398948,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1475, 0.135061, 0, 9999, -9999,\n 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1476, \n 114.968527, 0, 9999, -9999, 1.0, 100, 1, 250.480113, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1477, 2.305445, 0, 9999, -9999, 1.0, 100, 1, \n 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1482, 0.072524, 0, \n 9999, -9999, 1.0, 100, 1, 17.51083, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1483, 0.822639, 0, 9999, -9999, 1.0, 100, 1, 3.599649, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1484, 0.00601, 0, 9999, -9999, 1.0, 100, 1,\n 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1485, 0.113245, 0, \n 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1486, 0.582622, 0, 9999, -9999, 1.0, 100, 1, 2.89934, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1489, 0.028354, 0, 9999, -9999, 1.0, 100, 1, \n 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1490, 675.613992, 0, \n 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1491, 6.722106, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1492, 10.405171, 0, 9999, -9999, 1.0, 100,\n 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1493, 4.588641, \n 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1494, 41.935608, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1495, 7.676729, 0, 9999, -9999, 1.0,\n 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1500, \n 0.008709, 0, 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1501, 0.00044, 0, 9999, -9999, 1.0, 100, 1, 8.165333, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1503, 0.000454, 0, 9999, -9999,\n 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1504, \n 0.077847, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1512, 0.001465, 0, 9999, -9999, 1.0, 100, 1, \n 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1513, 0.551953, 0, \n 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1518, 0.11666, 0, 9999, -9999, 1.0, 100, 1, 0.670542, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1519, 0.008097, 0, 9999, -9999, 1.0, 100, 1, \n 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]'], {}), '([[586, 47.326635, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [590, 38.0, 0, 9999, -9999, 1.0,\n 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [593, 11.1, 0, \n 
9999, -9999, 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [594, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [595, 876.332761, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [597, 95.0, 0, 9999, -9999, 1.0, 100,\n 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [598, 12.0, 0, 9999, -\n 9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [599, \n 9.3, 0, 9999, -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [602, 24.6, 0, 9999, -9999, 1.0, 100, 1, 24.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [603, 486.918756, 0, 9999, -9999,\n 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [607, \n 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [609, 36.4, 0, 9999, -9999, 1.0, 100, 1, \n 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [610, 61.5, 0, 9999, -9999,\n 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [612, 30.0, 0,\n 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [616, 29.0, 0, 9999, -9999, 1.0, 100, 1, \n 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [617, 137.0, 0, 9999, -\n 9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [618, \n 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [623, 760.0, 0, 9999, -9999, 1.0,\n 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [624, 27.0, 0, \n 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [631, 79.8, 0, 9999, -9999, 1.0, 100, 1,\n 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [632, 45.1, 0, 9999, -9999,\n 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [637, 53.7, 0,\n 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [640, 12.0, 0, 9999, -9999, 1.0, 100, 1,\n 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [641, 12.6, 0, 9999, -9999,\n 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [642, 28.9, 0,\n 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [647, 14.0, 0, 9999, -9999, 1.0, 100, 1,\n 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [650, 1324.5, 0, 9999, -\n 9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [652,\n 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, \n 0.0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0], [658, 95.0, 0, 9999, -9999, 1.0,\n 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [661, 32.7, 0, \n 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [666, 28.9, 0, 9999, -9999, 1.0, 100, 1, \n 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [668, 766.0, 0, 9999, -\n 9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [670, \n 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [672, 33.1, 0, 9999, -9999, 1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [676, 370.0, 0, 9999, -9999, 1.0, 100, 1, 370.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [677, 13.4, 0, 9999, -9999, 1.0,\n 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [678, 1017.0, 0, \n 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [679, 545.306254, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [683, 27.5, 0, 9999, -9999, 1.0,\n 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [687, 1329.0, 0, \n 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [693, 194.0, 0, 9999, -9999, 1.0, 100, 1,\n 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [694, 16.4, 0, 9999, -\n 9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [695, \n 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [698, 24.0, 0, 9999, -9999, 1.0,\n 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [699, 104.6, 0, \n 9999, -9999, 1.0, 100, 1, 104.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [700, 27.0, 0, 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [701, 47.2, 0, 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [702, 73.4, 0, 9999, -9999, 1.0, 100, 1, \n 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [704, 508.0, 0, 9999, -\n 9999, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [705, \n 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [707, 34.0, 0, 9999, -9999, 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [708, 7.8, 0, 9999, -9999, 1.0, 100, 1, 7.8, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [711, 109.981679, 0, 9999, -9999, 1.0,\n 100, 1, 176.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [713, 13.4, 0, \n 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [714, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [716, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [717, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [719, 1336.155648, 0, 9999, -\n 9999, 1.0, 100, 1, 1958.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [721,\n 4.0, 0, 9999, -9999, 1.0, 100, 1, 4.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [722, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [723, 19.7, 0, 
9999, -9999, 1.0, 100, 1, 19.7, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [724, 12.1, 0, 9999, -9999, 1.0,\n 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [725, 800.0, 0, \n 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [726, 126.0, 0, 9999, -9999, 1.0, 100, 1, 126.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [728, 510.0, 0, 9999, -9999, 1.0, 100, 1,\n 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [730, 633.2, 0, 9999, -\n 9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [731, \n 715.079188, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [733, 396.6, 0, 9999, -9999, 1.0, 100, 1,\n 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [735, 84.8, 0, 9999, -\n 9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [736, \n 32.0, 0, 9999, -9999, 1.0, 100, 1, 32.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [737, 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [739, 59.9, 0, 9999, -9999, 1.0,\n 100, 1, 59.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [741, 214.0, 0, \n 9999, -9999, 1.0, 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [742, 9.0, 0, 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [743, 190.321651, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [745, 42.0, 0, 9999, -9999, 1.0, 100,\n 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [746, 100.0, 0, 9999, -\n 9999, 1.0, 100, 1, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [747, \n 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [748, 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [750, 90.8, 0, 9999, -9999, 1.0,\n 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [758, 18.5, 0, \n 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [760, 317.678066, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [762, 1105.0, 0, 9999, -9999, \n 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [763, 20.3,\n 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [769, 43.3, 0, 9999, -9999, 1.0, 100, 1,\n 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [771, 690.0, 0, 9999, -\n 9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [772, \n 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [775, 128.0, 0, 9999, -9999, 1.0, 100, 1, 128.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [776, 56.0, 0, 9999, -9999, 1.0,\n 100, 1, 56.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [777, 79.0, 0, \n 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [778, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [779, 34.2, 0, 9999, -9999, 1.0, 100, 1, 34.2, 
0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [781, 977.621115, 0, 9999, -9999, 1.0, 100,\n 1, 1310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [784, 780.2167, 0, \n 9999, -9999, 1.0, 100, 1, 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [785, 3.0, 0, 9999, -9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [786, 195.4, 0, 9999, -9999, 1.0, 100, 1, 195.4, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [787, 778.0, 0, 9999, -9999, 1.0, 100, 1, \n 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [788, 875.0, 0, 9999, -\n 9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [789, \n 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [791, 10.0, 0, 9999, -9999, 1.0, 100, 1, 10.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [792, 62.7, 0, 9999, -9999, 1.0, 100, 1, 62.7, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [793, 9.8, 0, 9999, -9999, 1.0, \n 100, 1, 9.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [794, 0.2, 0, 9999,\n -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [795, \n 13.6, 0, 9999, -9999, 1.0, 100, 1, 13.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [796, 85.1, 0, 9999, -9999, 1.0, 100, 1, 85.1, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [798, 209.378239, 0, 9999, -9999, 1.0, 100, 1, \n 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [800, 36.5, 0, 9999, -\n 9999, 1.0, 100, 1, 36.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [801, \n 18.60032, 0, 9999, -9999, 1.0, 100, 1, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [802, 500.0, 0, 9999, -9999, 1.0, 100, 1, 500.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [805, 875.108011, 0, 9999, -9999, 1.0, 100,\n 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [806, 35.8, 0, 9999, \n -9999, 1.0, 100, 1, 35.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [808, \n 217.5, 0, 9999, -9999, 1.0, 100, 1, 217.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [809, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [811, 25.2, 0, 9999, -9999, 1.0, 100, 1, 25.2,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [814, 89.0, 0, 9999, -9999, 1.0,\n 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [816, 80.1, 0, \n 9999, -9999, 1.0, 100, 1, 80.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [817, 54.0, 0, 9999, -9999, 1.0, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [818, 216.869854, 0, 9999, -9999, 1.0, 100, 1, 757.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [821, 82.5, 0, 9999, -9999, 1.0, 100,\n 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [822, 134.0, 0, 9999, -\n 9999, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [825, \n 42.7, 0, 9999, -9999, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [826, 58.0, 0, 9999, -9999, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [830, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [833, 18.6, 0, 9999, -9999, 1.0,\n 100, 1, 18.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [834, 23.3, 0, \n 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [835, 63.7, 0, 9999, -9999, 1.0, 100, 1, 63.7, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [836, 25.5, 0, 9999, -9999, 1.0, 100, 1, 25.5, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [837, 472.0, 0, 9999, -9999, 1.0, 100, 1, \n 472.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [839, 73.3, 0, 9999, -\n 9999, 1.0, 100, 1, 73.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [840, \n 90.722956, 0, 9999, -9999, 1.0, 100, 1, 1391.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [841, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, \n 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0], [843, 333.0, 0, 9999, -9999, 1.0, 100, 1,\n 333.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [844, 40.0, 0, 9999, -\n 9999, 1.0, 100, 1, 40.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [845, \n 318.0, 0, 9999, -9999, 1.0, 100, 1, 318.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [848, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [849, 779.0, 0, 9999, -9999, 1.0, 100, 1, \n 779.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [850, 16.0, 0, 9999, -\n 9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [851, \n 79.5, 0, 9999, -9999, 1.0, 100, 1, 79.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [852, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [853, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [855, 688.0, 0, 9999, -9999, 1.0,\n 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [856, 36.0, 0, \n 9999, -9999, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [857, 1402.0, 0, 9999, -9999, 1.0, 100, 1, 1402.0, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [858, 56.8, 0, 9999, -9999, 1.0, 100, 1, 56.8, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [859, 85.0, 0, 9999, -9999, 1.0, 100,\n 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [860, 25.0, 0, 9999, -\n 9999, 1.0, 100, 1, 25.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [862, \n 725.0, 0, 9999, -9999, 1.0, 100, 1, 725.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [863, 0.6, 0, 9999, -9999, 1.0, 100, 1, 0.6, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [864, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [865, 11.0, 0, 9999, -9999, 1.0,\n 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [866, 260.44, 0, \n 9999, -9999, 1.0, 100, 1, 260.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [867, 769.0, 0, 9999, -9999, 1.0, 100, 1, 769.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [869, 1360.0, 0, 9999, -9999, 1.0, 100, 1, 1360.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [870, 58.4, 0, 9999, -9999, 1.0, 100,\n 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [872, 22.5, 0, 9999, -\n 9999, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [873, \n 122.0, 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [874, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [875, 24.4, 0, 9999, -9999, 1.0, 100, 1, 24.4,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [876, 58.4, 0, 9999, -9999, 1.0,\n 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [877, 24.8, 0, \n 9999, -9999, 1.0, 100, 1, 24.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [881, 1001.3, 0, 9999, -9999, 1.0, 100, 1, 1001.3, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [882, 17.4, 0, 9999, -9999, 1.0, 100, 1, 17.4, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [883, 18.0, 0, 9999, -9999, 1.0, 100,\n 1, 18.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [885, 490.0, 0, 9999, -\n 9999, 1.0, 100, 1, 490.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [886, \n 2572.0, 0, 9999, -9999, 1.0, 100, 1, 2572.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [888, 35.1, 0, 9999, -9999, 1.0, 100, 1, 35.1, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [889, 9.5, 0, 9999, -9999, 1.0, 100, 1, 9.5,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [890, 48.0, 0, 9999, -9999, 1.0,\n 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [895, 19.0, 0, \n 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [896, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [897, 56.0, 0, 9999, 
-9999, 1.0, 100, 1, 56.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [898, 84.6, 0, 9999, -9999, 1.0, 100, 1, \n 84.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [899, 8.5, 0, 9999, -9999,\n 1.0, 100, 1, 8.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [900, 112.6, 0,\n 9999, -9999, 1.0, 100, 1, 112.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [902, 19.5, 0, 9999, -9999, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [903, 20.1, 0, 9999, -9999, 1.0, 100, 1, 20.1, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [905, 137.3, 0, 9999, -9999, 1.0, 100, 1, \n 137.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [906, 66.0, 0, 9999, -\n 9999, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [907, \n 67.3, 0, 9999, -9999, 1.0, 100, 1, 67.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [909, 36.8, 0, 9999, -9999, 1.0, 100, 1, 36.8, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [913, 74.0, 0, 9999, -9999, 1.0, 100, 1, 74.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [915, 12.0, 0, 9999, -9999, 1.0,\n 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [917, 17.0, 0, \n 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [918, 38.5, 0, 9999, -9999, 1.0, 100, 1, 38.5, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [920, 12.8, 0, 9999, -9999, 1.0, 100, 1, 12.8, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [921, 124.0, 0, 9999, -9999, 1.0, 100, 1, \n 124.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [922, 164.0, 0, 9999, -\n 9999, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [923, \n 146.0, 0, 9999, -9999, 1.0, 100, 1, 146.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [924, 11.7, 0, 9999, -9999, 1.0, 100, 1, 11.7, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [925, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [928, 61.5, 0, 9999, -9999, 1.0,\n 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [931, 217.1, 0, \n 9999, -9999, 1.0, 100, 1, 217.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [934, 181.07236, 0, 9999, -9999, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [935, 23.1, 0, 9999, -9999, 1.0, 100, 1, 23.1, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [936, 104.4, 0, 9999, -9999, 1.0, 100,\n 1, 104.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [937, 30.0, 0, 9999, -\n 9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [939, \n 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [940, 29.6, 0, 9999, -9999, 1.0, 100, 1, 29.6, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [942, 51.9, 0, 9999, -9999, 1.0, 100, 1, 51.9, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [944, 25.4, 0, 9999, -9999, 1.0,\n 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [945, 35.0, 0, \n 9999, -9999, 1.0, 100, 1, 35.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [948, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [950, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [952, 31.7, 0, 9999, -9999, 1.0, 100, 1, \n 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [956, 43.862599, 0, 9999, \n -9999, 1.0, 100, 1, 65.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [957, \n 6.0, 0, 9999, -9999, 1.0, 100, 1, 6.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [958, 66.7, 0, 9999, -9999, 1.0, 100, 1, 66.7, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [959, 45.5, 0, 9999, -9999, 1.0, 100, 1, 45.5, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [960, 26.5, 0, 9999, -9999, 1.0,\n 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [963, 780.274633, \n 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, \n 0], [965, 352.0, 0, 9999, -9999, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [966, 66.0, 0, 9999, -9999, 1.0, 100, 1, 66.0, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [967, 37.5, 0, 9999, -9999, 1.0, 100,\n 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [968, 54.0, 0, 9999, -\n 9999, 0.999529, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 969, 56.9, 0, 9999, -9999, 0.999529, 100, 1, 56.9, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [971, 20.0, 0, 9999, -9999, 1.0, 100, 1, 20.0, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [972, 390.0, 0, 9999, -9999, 1.0, 100,\n 1, 390.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [973, 1347.0, 0, 9999,\n -9999, 1.0, 100, 1, 1347.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [975,\n 52.5, 0, 9999, -9999, 1.0, 100, 1, 52.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [976, 26.9, 0, 9999, -9999, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [977, 324.0, 0, 9999, -9999, 1.0, 100, 1, 324.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [978, 4.6, 0, 9999, -9999, 1.0, \n 100, 1, 4.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [981, 119.0, 0, \n 9999, -9999, 1.0, 100, 1, 119.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [982, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [983, 44.0, 0, 9999, -9999, 1.0, 100, 1, 44.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [984, 465.0, 0, 9999, -9999, 1.0, 100, 1, \n 465.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [985, 22.0, 0, 9999, -\n 9999, 1.0, 100, 1, 22.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [986, \n 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [987, 164.5, 0, 9999, -9999, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [988, 5.1, 0, 9999, -9999, 1.0, 100, 1, 5.1, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [990, 176.488981, 0, 9999, -9999, 1.0,\n 100, 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [993, 392.0, 0, \n 9999, -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [994, 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [997, 18.8, 0, 9999, -9999, 1.0,\n 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [998, 423.0, 0, \n 9999, -9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [999, 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, \n 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1003, 900.0, 0, 9999, -\n 9999, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1007,\n 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1010, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1011, 18.7, 0, 9999, -9999, 1.0,\n 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1012, 2822.8416, \n 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1018, 175.9, 0, 9999, -9999, 1.0, 100, 1, 175.9, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [1019, 120.0, 0, 9999, -9999, 1.0, 100, 1, 120.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1023, 0.2, 0, 9999, -9999, 1.0,\n 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1025, 113.6, 0, 
\n 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, 655.6, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1027, 42.143139, 0, 9999, -9999, 1.0, 100, 1, 48.3, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1028, 400.0, 0, 9999, -9999, \n 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1029, 60.0,\n 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1030, 547.524827, 0, 9999, -9999, 1.0, 100, 1, 1018.0, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1031, 1447.199962, 0, 9999, -9999, 1.0, 100, \n 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1032, 37.130761, 0, \n 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1033, 7.986228, 0, 9999, -9999, 1.0, 100, 1, 50.164506, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1034, 26.393282, 0, 9999, -9999, 1.0, 100,\n 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1035, 10.23952, 0,\n 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1036, 12.383139, 0, 9999, -9999, 1.0, 100, 1, 67.223077, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1037, 37.034771, 0, 9999, -9999, 1.0, 100,\n 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1038, 32.273941, \n 0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1039, 16.259191, 0, 9999, -9999, 1.0, 100, 1, 132.724114, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1041, 23.220031, 0, 9999, -9999, 1.0,\n 100, 1, 204.187624, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1042, \n 5.897804, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1044, 17.843471, 0, 9999, -9999, 1.0, 100, 1, \n 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1046, 79.936922, 0, \n 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1047, 7.020261, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1048, 40.016896, 0, 9999, -9999, 1.0, 100,\n 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1049, 75.832425, \n 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1050, 2.574285, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1051, 7.252584, 0, 9999, -9999, 1.0, \n 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1052, \n 15.175791, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1053, 12.346923, 0, 9999, -9999, 1.0, 100, 1, \n 16.368087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1054, 216.322419, 0,\n 9999, -9999, 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1055, 0.30681, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1056, 53.206333, 0, 9999, -9999, 1.0, 100, 1,\n 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1057, 100.378169, 0,\n 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1058, 117.210047, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1059, 43.35606, 0, 9999, -9999, 1.0, \n 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1060, \n 0.48885, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1061, 9.89646, 0, 9999, -9999, 1.0, 100, 1, 161.862597,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1062, 0.28386, 0, 9999, -9999, \n 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1063, \n 0.835661, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1064, 
19.595506, 0, 9999, -9999, 1.0, 100, 1, \n 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1065, 38.415656, 0,\n 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1066, 11.779239, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1067, 0.006954, 0, 9999, -9999, 1.0, \n 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1072, \n 53.011687, 0, 9999, -9999, 1.0, 100, 1, 112.606433, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1073, 54.828335, 0, 9999, -9999, 1.0, 100, 1, \n 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1074, 80.822515, 0, \n 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1077, 0.830976, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1079, 37.497464, 0, 9999, -9999, 1.0, 100,\n 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1080, 0.003424, 0,\n 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1081, 39.02582, 0, 9999, -9999, 1.0, 100, 1, 405.642115, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1082, 30.101628, 0, 9999, -9999, 1.0, 100,\n 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1083, 40.966551,\n 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1084, 34.330508, 0, 9999, -9999, 1.0, 100, 1, 602.719371, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1085, 17.091912, 0, 9999, -9999, 1.0,\n 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1086, \n 32.887191, 0, 9999, -9999, 1.0, 100, 1, 225.59917, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1087, 7.499037, 0, 9999, -9999, 1.0, 100, 1, \n 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1088, 3.179915, 0, \n 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1089, 21.693926, 0, 9999, -9999, 1.0, 100, 1, 384.449592, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1090, 68.505794, 0, 9999, -9999, 1.0, \n 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1091, \n 30.312336, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1092, 44.920748, 0, 9999, -9999, 1.0, 100, 1, \n 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1093, 65.837792, 0, \n 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1094, 0.941636, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1095, 0.050289, 0, 9999, -9999, 1.0, 100, \n 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1096, 29.200726, 0,\n 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1097, 2.055263, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1098, 34.647847, 0, 9999, -9999, 1.0, 100,\n 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1099, 166.043557,\n 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1101, 35.196912, 0, 9999, -9999, 1.0, 100, 1, 83.930665, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1102, 85.484282, 0, 9999, -9999, 1.0, \n 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1103, \n 88.696046, 0, 9999, -9999, 1.0, 100, 1, 245.381701, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1104, 0.046739, 0, 9999, -9999, 1.0, 100, 1, \n 0.206918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1105, 0.763545, 0, \n 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1106, 0.538044, 0, 9999, -9999, 1.0, 100, 1, 2.289793, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1107, 9.315632, 0, 9999, -9999, 1.0, 100, \n 1, 
76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1108, 46.051867, \n 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1109, 0.188979, 0, 9999, -9999, 1.0, 100, 1, 0.77821, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1110, 0.495025, 0, 9999, -9999, 1.0, 100, \n 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1111, 9.399663, 0,\n 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1112, 17.835069, 0, 9999, -9999, 1.0, 100, 1, 69.53429, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1113, 0.897166, 0, 9999, -9999, 1.0, 100, \n 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1114, 1.424338, 0,\n 9999, -9999, 1.0, 100, 1, 13.446889, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1115, 14.79839, 0, 9999, -9999, 1.0, 100, 1, 50.575278, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1116, 8.410982, 0, 9999, -9999, 1.0, 100, \n 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1117, 22.537982, \n 0, 9999, -9999, 1.0, 100, 1, 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1118, 1.619728, 0, 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1119, 10.776742, 0, 9999, -9999, 1.0, \n 100, 1, 43.254023, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1120, \n 0.54849, 0, 9999, -9999, 1.0, 100, 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1121, 0.128871, 0, 9999, -9999, 1.0, 100, 1, 0.540589,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1122, 0.355104, 0, 9999, -9999,\n 1.0, 100, 1, 1.462883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1123, \n 0.279445, 0, 9999, -9999, 1.0, 100, 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1124, 0.319102, 0, 9999, -9999, 1.0, 100, 1, 1.288283,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1125, 4.97339, 0, 9999, -9999, \n 1.0, 100, 1, 25.818899, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1126, \n 5.785037, 0, 9999, -9999, 1.0, 100, 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1127, 36.406066, 0, 9999, -9999, 1.0, 100, 1, \n 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1128, 0.968241, 0, \n 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1129, 1.446507, 0, 9999, -9999, 1.0, 100, 1, 4.738747, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1130, 0.288868, 0, 9999, -9999, 1.0, 100, 1, \n 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1131, 0.882575, 0, \n 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1132, 0.099503, 0, 9999, -9999, 1.0, 100, 1, 0.359497, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1133, 0.171545, 0, 9999, -9999, 1.0, 100, \n 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1134, 0.12121, 0, \n 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1135, 1.390811, 0, 9999, -9999, 1.0, 100, 1, 8.117819, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1136, 0.092905, 0, 9999, -9999, 1.0, 100, \n 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1137, 0.470801, 0, \n 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1138, 0.251948, 0, 9999, -9999, 1.0, 100, 1, 1.254278, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1139, 5.083469, 0, 9999, -9999, 1.0, 100, \n 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1140, 4.060139, 0,\n 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1141, 31.800974, 0, 9999, -9999, 1.0, 100, 1, 119.46456, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1142, 0.257109, 0, 9999, -9999, 1.0, 100, \n 1, 1.215733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1143, 3.996627, 0,\n 9999, -9999, 1.0, 
100, 1, 25.239356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1144, 15.96963, 0, 9999, -9999, 1.0, 100, 1, 52.527382, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1145, 124.051607, 0, 9999, -9999, 1.0, 100,\n 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1146, 0.20533, 0,\n 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1147, 12.694751, 0, 9999, -9999, 1.0, 100, 1, 45.703707, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1148, 6.218355, 0, 9999, -9999, 1.0, 100, \n 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1149, 1.909193, 0,\n 9999, -9999, 1.0, 100, 1, 8.556784, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1150, 0.742648, 0, 9999, -9999, 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1151, 4.837401, 0, 9999, -9999, 1.0, 100, 1, \n 13.036113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1152, 0.038213, 0, \n 9999, -9999, 1.0, 100, 1, 0.116518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1153, 0.013427, 0, 9999, -9999, 1.0, 100, 1, 0.068788, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1154, 0.031353, 0, 9999, -9999, 1.0, 100, \n 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1155, 0.221052, 0,\n 9999, -9999, 1.0, 100, 1, 0.609451, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1156, 4.092172, 0, 9999, -9999, 1.0, 100, 1, 16.022334, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1157, 1.612252, 0, 9999, -9999, 1.0, 100, \n 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1158, 0.260133, 0,\n 9999, -9999, 1.0, 100, 1, 1.04304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1159, 3.421268, 0, 9999, -9999, 1.0, 100, 1, 13.498087, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1160, 92.70123, 0, 9999, -9999, 1.0, 100, \n 1, 238.377761, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1161, 1.447396, \n 0, 9999, -9999, 1.0, 100, 1, 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1162, 66.108496, 0, 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1163, 39.337378, 0, 9999, -9999, 1.0,\n 100, 1, 330.03194, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1164, \n 48.660165, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1165, 6.897139, 0, 9999, -9999, 1.0, 100, 1, \n 57.188579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1166, 39.149107, 0, \n 9999, -9999, 1.0, 100, 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1167, 1.266099, 0, 9999, -9999, 1.0, 100, 1, 5.05378, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1168, 0.406717, 0, 9999, -9999, 1.0, 100, 1, \n 1.345774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1169, 0.977901, 0, \n 9999, -9999, 1.0, 100, 1, 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1170, 0.065614, 0, 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1173, 54.517441, 0, 9999, -9999, 1.0, 100, 1,\n 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1174, 0.315409, 0, \n 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1175, 0.295585, 0, 9999, -9999, 1.0, 100, 1, 0.855454, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1176, 0.083542, 0, 9999, -9999, 1.0, 100, \n 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1177, 10.259509, 0,\n 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1178, 0.601094, 0, 9999, -9999, 1.0, 100, 1, 3.167999, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1179, 0.298531, 0, 9999, -9999, 1.0, 100, \n 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1180, 0.17058, 0, \n 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1181, 40.321989, 0, 
9999, -9999, 1.0, 100, 1, 85.739557, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1182, 51.507092, 0, 9999, -9999, 1.0, 100,\n 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1183, 5.009306, 0,\n 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1184, 0.734019, 0, 9999, -9999, 1.0, 100, 1, 4.219005, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1185, 2.912064, 0, 9999, -9999, 1.0, 100, \n 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1186, 11.368637, \n 0, 9999, -9999, 1.0, 100, 1, 38.916368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1187, 2.579245, 0, 9999, -9999, 1.0, 100, 1, 9.814574, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1188, 63.736435, 0, 9999, -9999, 1.0, \n 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1189, \n 3.253623, 0, 9999, -9999, 1.0, 100, 1, 20.261805, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1190, 0.006209, 0, 9999, -9999, 1.0, 100, 1, \n 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1191, 0.0187, 0, \n 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1196, 91.192992, 0, 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1197, 45.154128, 0, 9999, -9999, 1.0, \n 100, 1, 90.592266, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1198, \n 10.781322, 0, 9999, -9999, 1.0, 100, 1, 39.819157, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1199, 129.096066, 0, 9999, -9999, 1.0, 100, 1, \n 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1200, 40.048819, 0,\n 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1203, 0.000518, 0, 9999, -9999, 1.0, 100, 1, 182.623256, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1204, 6.696653, 0, 9999, -9999, 1.0, 100, \n 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1211, 0.002325, 0,\n 9999, -9999, 1.0, 100, 1, 18.005229, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1212, 8.9e-05, 0, 9999, -9999, 1.0, 100, 1, 91.171888, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1213, 0.973196, 0, 9999, -9999, 1.0, 100, \n 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1214, 3.4e-05, 0,\n 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1215, 0.054275, 0, 9999, -9999, 1.0, 100, 1, 2.252965, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1216, 1.253411, 0, 9999, -9999, 1.0, 100, \n 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1217, 0.149386, 0,\n 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1218, 0.003217, 0, 9999, -9999, 1.0, 100, 1, 0.980482, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1219, 2.479626, 0, 9999, -9999, 1.0, 100, \n 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1220, 3.839093, 0,\n 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1221, 2.695126, 0, 9999, -9999, 1.0, 100, 1, 593.230436, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1222, 7.161228, 0, 9999, -9999, 1.0, 100, \n 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1225, 1.255556, \n 0, 9999, -9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1226, 0.28775, 0, 9999, -9999, 1.0, 100, 1, 3.982858, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1228, 0.001483, 0, 9999, -9999, 1.0, 100, \n 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1229, 18.31617, 0,\n 9999, -9999, 1.0, 100, 1, 51.244222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1230, 0.000325, 0, 9999, -9999, 1.0, 100, 1, 1.681276, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1231, 1.923913, 0, 9999, -9999, 1.0, 100, \n 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [1232, 4.63817, 0, \n 9999, -9999, 1.0, 100, 1, 75.075088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1233, 500.196383, 0, 9999, -9999, 1.0, 100, 1, 575.36828, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1235, 8.984787, 0, 9999, -9999, 1.0, \n 100, 1, 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1236, \n 81.082668, 0, 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1237, 1.527562, 0, 9999, -9999, 1.0, 100, 1, \n 14.605409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1238, 4.660084, 0, \n 9999, -9999, 1.0, 100, 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1239, 1.665042, 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1240, 37.421408, 0, 9999, -9999, 1.0, 100,\n 1, 339.51051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1241, 18.885145, \n 0, 9999, -9999, 1.0, 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1242, 1.763094, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1243, 6.672629, 0, 9999, -9999, 1.0, \n 100, 1, 83.079842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1244, \n 170.415315, 0, 9999, -9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1245, 0.893274, 0, 9999, -9999, 1.0, 100, 1, \n 8.080896, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1246, 29.939855, 0, \n 9999, -9999, 1.0, 100, 1, 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1247, 0.002877, 0, 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1248, 35.172508, 0, 9999, -9999, 1.0, 100,\n 1, 91.958275, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1249, 6.814281, 0,\n 9999, -9999, 1.0, 100, 1, 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1250, 2.321024, 0, 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1251, 0.763016, 0, 9999, -9999, 1.0, 100, \n 1, 23.404345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1252, 0.866633, 0,\n 9999, -9999, 1.0, 100, 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1253, 10.241594, 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1254, 27.7169, 0, 9999, -9999, 1.0, 100, 1,\n 82.278695, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1255, 0.662735, 0, \n 9999, -9999, 1.0, 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1256, 2.741448, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1257, 16.932573, 0, 9999, -9999, 1.0, 100,\n 1, 88.95288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1258, 98.683252, 0,\n 9999, -9999, 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1259, 20.411221, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1260, 2.232628, 0, 9999, -9999, 1.0, \n 100, 1, 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1261, \n 1.62711, 0, 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1267, 1.762778, 0, 9999, -9999, 1.0, 100, 1, 39.469006,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1274, 2.637835, 0, 9999, -9999,\n 1.0, 100, 1, 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1275, \n 9.287573, 0, 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1276, 1.874038, 0, 9999, -9999, 1.0, 100, 1, 25.655641,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1277, 5.207053, 0, 9999, -9999,\n 1.0, 100, 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1278, \n 14.540024, 0, 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1282, 0.000138, 0, 9999, -9999, 1.0, 100, 1, \n 4.363037, 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1283, 1273.446566, 0,\n 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1287, 5.392165, 0, 9999, -9999, 1.0, 100, 1, 93.199628, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1288, 6.771115, 0, 9999, -9999, 1.0, \n 100, 1, 148.402692, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1289, \n 2.462386, 0, 9999, -9999, 1.0, 100, 1, 184.149235, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1290, 0.002137, 0, 9999, -9999, 1.0, 100, 1, \n 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1291, 8.652076, 0, \n 9999, -9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1292, 0.483448, 0, 9999, -9999, 1.0, 100, 1, 41.682074, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1293, 0.20117, 0, 9999, -9999, 1.0, 100, 1,\n 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1294, 0.49885, 0, \n 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1295, 0.4326, 0, 9999, -9999, 1.0, 100, 1, 5.873666, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1300, 1.338514, 0, 9999, -9999, 1.0, 100, 1, \n 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1301, 3.056681, 0, \n 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1302, 0.036072, 0, 9999, -9999, 1.0, 100, 1, 4.877299, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1303, 0.000754, 0, 9999, -9999, 1.0, 100, \n 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1306, 0.392214, 0,\n 9999, -9999, 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1307, 0.070954, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1308, 0.276331, 0, 9999, -9999, 1.0, 100, 1, \n 3.278321, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1312, 195.684185, 0, \n 9999, -9999, 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1317, 7.32081, 0, 9999, -9999, 1.0, 100, 1, 23.958574, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1319, 4.498958, 0, 9999, -9999, 1.0, 100, \n 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1323, 56.14053, 0,\n 9999, -9999, 1.0, 100, 1, 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1326, 10.182637, 0, 9999, -9999, 1.0, 100, 1, 56.928865, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1327, 10.455752, 0, 9999, -9999, 1.0, 100,\n 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1328, 4.010462, 0,\n 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1331, 0.080057, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1336, 1.009232, 0, 9999, -9999, 1.0, 100, \n 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1337, 91.485454, \n 0, 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1339, 2.005785, 0, 9999, -9999, 1.0, 100, 1, 10.086482, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1340, 58.628541, 0, 9999, -9999, 1.0, \n 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1346, \n 32.686762, 0, 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1348, 12.081109, 0, 9999, -9999, 1.0, 100, 1, \n 22.707927, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1349, 28.392849, 0, \n 9999, -9999, 1.0, 100, 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1356, 9.77708, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1357, 7.611677, 0, 9999, -9999, 1.0, 100, \n 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1359, 4.046937, 0,\n 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1360, 4.206305, 0, 9999, -9999, 
1.0, 100, 1, 17.135983, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1361, 14.984474, 0, 9999, -9999, 1.0, 100,\n 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1362, 19.170113, \n 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1363, 0.001093, 0, 9999, -9999, 1.0, 100, 1, 0.036158, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1364, 0.001363, 0, 9999, -9999, 1.0, \n 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1365, 2.2e-05,\n 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1366, 0.102023, 0, 9999, -9999, 1.0, 100, 1, 1.229992, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1372, 7.992948, 0, 9999, -9999, 1.0, \n 100, 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1373, \n 1.389051, 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1374, 71.380724, 0, 9999, -9999, 1.0, 100, 1, \n 108.220146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1375, 37.822803, 0,\n 9999, -9999, 1.0, 100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1376, 69.720118, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1377, 58.199323, 0, 9999, -9999, 1.0, \n 100, 1, 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1378, \n 45.859926, 0, 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1379, 0.14096, 0, 9999, -9999, 1.0, 100, 1, \n 0.805984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1380, 0.335839, 0, \n 9999, -9999, 1.0, 100, 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1381, 0.184209, 0, 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1382, 13.813542, 0, 9999, -9999, 1.0, 100, 1,\n 138.839906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1383, 21.399019, 0,\n 9999, -9999, 1.0, 100, 1, 109.821439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1384, 1.198594, 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1385, 0.024343, 0, 9999, -9999, 1.0, 100, \n 1, 0.124455, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1386, 0.167167, 0,\n 9999, -9999, 1.0, 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1387, 1.066413, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1388, 0.256908, 0, 9999, -9999, 1.0, 100, \n 1, 0.928188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1389, 0.059104, 0,\n 9999, -9999, 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1390, 1.139446, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1391, 0.133841, 0, 9999, -9999, 1.0, 100, \n 1, 0.521719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1392, 4.971503, 0,\n 9999, -9999, 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1393, 0.252203, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1394, 0.208445, 0, 9999, -9999, 1.0, 100, \n 1, 1.077886, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1395, 0.013642, 0,\n 9999, -9999, 1.0, 100, 1, 0.073776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1396, 0.00417, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1397, 8.076645, 0, 9999, -9999, 1.0, 100, 1, \n 25.084545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1398, 0.940882, 0, \n 9999, -9999, 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1399, 3.713776, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1400, 0.236741, 0, 9999, -9999, 1.0, 100, \n 1, 1.297197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1401, 
18.486316, 0,\n 9999, -9999, 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1402, 6.374645, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1403, 46.195768, 0, 9999, -9999, 1.0, 100,\n 1, 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1404, 54.270904,\n 0, 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1405, 6.605109, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1406, 2.938927, 0, 9999, -9999, 1.0, \n 100, 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1407, \n 0.025825, 0, 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1408, 5.740956, 0, 9999, -9999, 1.0, 100, 1, 41.078698,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1409, 1.5687, 0, 9999, -9999, \n 1.0, 100, 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1410, \n 5.314918, 0, 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1411, 6.47493, 0, 9999, -9999, 1.0, 100, 1, 39.395367,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1412, 0.032719, 0, 9999, -9999,\n 1.0, 100, 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1413, \n 0.023058, 0, 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1414, 8.5e-05, 0, 9999, -9999, 1.0, 100, 1, 25.992489,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1418, 13.628823, 0, 9999, -9999,\n 1.0, 100, 1, 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1419, \n 4.359201, 0, 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1421, 0.026178, 0, 9999, -9999, 0.999529, 100, 1, \n 6.972369, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1422, 0.02267, 0, \n 9999, -9999, 1.0, 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1423, 0.003041, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1424, 205.502248, 0, 9999, -9999, 1.0, 100,\n 1, 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1425, 7.089408, \n 0, 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1426, 12.895597, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1427, 51.532387, 0, 9999, -9999, 1.0, \n 100, 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1428, \n 32.967042, 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1431, 78.980532, 0, 9999, -9999, 1.0, 100, 1, \n 227.662022, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1432, 6.308304, 0, \n 9999, -9999, 1.0, 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1433, 1093.225383, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1434, 78.196858, 0, 9999, -9999, 1.0,\n 100, 1, 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1435, \n 62.720229, 0, 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1436, 53.522086, 0, 9999, -9999, 1.0, 100, 1, \n 98.434116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1437, 5.612357, 0, \n 9999, -9999, 1.0, 100, 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1438, 41.072054, 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1439, 31.289263, 0, 9999, -9999, 1.0, \n 100, 1, 99.103164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1440, \n 0.082585, 0, 9999, -9999, 1.0, 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1443, 58.478089, 0, 9999, -9999, 1.0, 100, 1, \n 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1446, 40.070237, 0,\n 9999, -9999, 1.0, 100, 1, 
758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1447, 0.499762, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1448, 3.354244, 0, 9999, -9999, 1.0, 100, \n 1, 7.523578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1449, 36.4709, 0, \n 9999, -9999, 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1450, 21.193068, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1451, 27.993599, 0, 9999, -9999, 1.0, 100,\n 1, 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1452, 10.328116, \n 0, 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1453, 9.5e-05, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1454, 5.891023, 0, 9999, -9999, 1.0, 100, \n 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1455, 0.237412, \n 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1456, 19.998371, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1457, 0.55431, 0, 9999, -9999, 1.0, 100,\n 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1458, 0.068144, 0,\n 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1459, 0.968807, 0, 9999, -9999, 1.0, 100, 1, 5.309059, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1460, 5.997406, 0, 9999, -9999, 1.0, 100, \n 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1461, 4.61741, 0,\n 9999, -9999, 1.0, 100, 1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1462, 0.616784, 0, 9999, -9999, 1.0, 100, 1, 2.402686, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1463, 0.168278, 0, 9999, -9999, 1.0, 100, \n 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1464, 8.306476, 0,\n 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1465, 1.336085, 0, 9999, -9999, 1.0, 100, 1, 5.299939, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1466, 2.109579, 0, 9999, -9999, 1.0, 100, \n 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1467, 0.543693, 0,\n 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1468, 6.402705, 0, 9999, -9999, 1.0, 100, 1, 23.789171, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1469, 9.990425, 0, 9999, -9999, 1.0, 100, \n 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1470, 49.691631, \n 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1471, 113.337953, 0, 9999, -9999, 1.0, 100, 1, 159.165074, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1472, 2.427411, 0, 9999, -9999, 1.0,\n 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1473, \n 2.100139, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1474, 0.510586, 0, 9999, -9999, 1.0, 100, 1, 1.398948,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1475, 0.135061, 0, 9999, -9999,\n 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1476, \n 114.968527, 0, 9999, -9999, 1.0, 100, 1, 250.480113, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1477, 2.305445, 0, 9999, -9999, 1.0, 100, 1, \n 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1482, 0.072524, 0, \n 9999, -9999, 1.0, 100, 1, 17.51083, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1483, 0.822639, 0, 9999, -9999, 1.0, 100, 1, 3.599649, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1484, 0.00601, 0, 9999, -9999, 1.0, 100, 1,\n 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1485, 0.113245, 0, \n 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1486, 0.582622, 0, 9999, -9999, 
1.0, 100, 1, 2.89934, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1489, 0.028354, 0, 9999, -9999, 1.0, 100, 1, \n 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1490, 675.613992, 0, \n 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1491, 6.722106, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1492, 10.405171, 0, 9999, -9999, 1.0, 100,\n 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1493, 4.588641, \n 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1494, 41.935608, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1495, 7.676729, 0, 9999, -9999, 1.0,\n 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1500, \n 0.008709, 0, 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1501, 0.00044, 0, 9999, -9999, 1.0, 100, 1, 8.165333, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1503, 0.000454, 0, 9999, -9999,\n 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1504, \n 0.077847, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1512, 0.001465, 0, 9999, -9999, 1.0, 100, 1, \n 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1513, 0.551953, 0, \n 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1518, 0.11666, 0, 9999, -9999, 1.0, 100, 1, 0.670542, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1519, 0.008097, 0, 9999, -9999, 1.0, 100, 1, \n 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])\n', (94113, 158043), False, 'from numpy import array\n'), ((162053, 292905), 'numpy.array', 'array', (['[[586, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [589, 108, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [590, 108, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [593, 112, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [594, 114, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [595, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [601,\n 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [602, 121, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [603, 526, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [607, 127, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [608, 127, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [609, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [613, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [614,\n 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [616, 132, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [617, 133, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [618, 133, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [619, 134, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [621, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [628, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [629, \n 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [631, 145, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [632, 
145, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [637, 148, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [638, 149, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [639, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [642, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [643,\n 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [646, 536, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [647, 536, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [650, 166, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [652, 167, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [655, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [657, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [658, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [661, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [662,\n 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [663, 178, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [666, 180, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [668, 183, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [670, 183, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [672, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [676, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [677, 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [678, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [679,\n 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [681, 197, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [683, 200, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [687, 202, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [689, 204, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [691, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [693, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [694, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [695, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [696, \n 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [697, 211, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [698, 212, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [699, 213, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [700, 214, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [701, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [702, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [704, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [705, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [707,\n 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [708, 221, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [711, 224, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [713, 225, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [714, 225, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [716, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [717, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [719, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [721, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [722,\n 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [723, 235, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [724, 238, 0, 1e-05, 0,\n 9999, 
9999, 9999, 0, 0, 1, -360, 360], [725, 239, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [726, 240, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [727, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [728, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [730, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [731, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [732,\n 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [733, 549, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [735, 253, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [736, 256, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [737, 256, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [738, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [739, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [741, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [742, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [743,\n 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [745, 273, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [746, 273, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [747, 273, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [748, 274, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [749, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [750, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [758, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [760, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [761,\n 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [762, 289, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [763, 560, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [765, 560, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [767, 292, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [769, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [771, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [772, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [774, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [775, \n 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [776, 300, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [777, 300, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [778, 300, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [779, 302, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [781, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [784, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [785, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [786, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [787, \n 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [788, 311, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [789, 565, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [791, 314, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [792, 316, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [793, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [794, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [795, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [796, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [798,\n 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [800, 326, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [801, 327, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 
0, 0.04330380165289256, 0.114528740018308, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497], [277, 278, 0, \n 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 83.072], [557, 558, 0, 0.04341289256198347, 0.258338836678648,\n 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493], [557, 559, 0, \n 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 51.665], [559, 558, 0, 0.04474314049586777, 0.11833546501370001, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999], [277, 78, 0, \n 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 103.557], [277, 279, 0, 0.021390927977839334, 0.191970480441328, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777], [78, 279, 0, \n 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 45.665], [281, 282, 0, 0.0023178670360110803, 0.08320574945862161,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388], [283, 161, 0, \n 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1,\n -360, 55.571000000000005], [268, 161, 0, 0.018883636363636366, \n 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 57.123000000000005], [256, 284, 0, 0.010755371900826446, \n 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535], [515, \n 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 61.576], [263, 516, 0, 0.0030355371900826445, \n 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365], [516,\n 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 10.449000000000002], [63, 286, 0, 0.019088925619834708, \n 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872], [287, \n 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 13.105], [8, 102, 0, 0.015100069252077563, \n 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 87.21799999999999], [8, 101, 0, 0.019246883656509697, 0.69091598202144,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17], [80, 288, 0, \n 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 46.11600000000001], [80, 289, 0, 0.0003782317636201524, \n 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, \n 6.553999999999999], [276, 560, 0, 0.01778314049586777, \n 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897], [37,\n 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, \n 2567.0, 0, 2, 1, -360, 48.773999999999994], [290, 74, 0, \n 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1,\n -360, 179.483], [512, 291, 0, 0.0053299173553719, 0.056385693247479204,\n 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123], [78, 292, 0, \n 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2,\n 1, -360, 50.381], [199, 548, 0, 0.0015530578512396695, \n 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349], [491, \n 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.720999999999998], [4, 294, 0, 9.669321329639889e-05, \n 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117], [\n 490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 76.503], [491, 295, 0, 0.010613553719008264, \n 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053], [491,\n 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.656000000000001], [295, 297, 0, 0.020297520661157024, \n 0.053682341459340005, 495.0, 
495.0, 495.0, 0, 1, 1, -360, 30.7], [508, \n 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 35.15], [117, 123, 0, 0.005876211911357341, \n 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941], [133,\n 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 12.907], [71, 74, 0, 0.03904524469065097, \n 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144], [\n 74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 89.09200000000001], [298, 515, 0, \n 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 32.823], [5, 299, 0, 0.0016232686980609415, 0.058271370400665996,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376], [32, 292, 0, \n 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 55.908], [5, 29, 0, 0.00743395083102493, 1.0674425076571843, \n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001], [503, 560, 0,\n 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, \n -360, 45.8], [300, 301, 0, 0.004892053324099723, 0.7024509290644521, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005], [51, 300, 0,\n 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 29.729], [244, 302, 0, 0.007714508310249307, 1.107727813004004,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118], [31, 302, 0, \n 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 50.472], [51, 282, 0, 0.006288434903047093, 0.9029576432132521,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999], [303, 304, 0,\n 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 0.254], [305, 304, 0, 0.003881117266849031, 0.0783689646873844,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813], [305, 259, 0, 0.0025625,\n 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 29.601999999999997], [306, 307, 0, 0.03223268698060942, \n 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088], [305, \n 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 7.01], [305, 309, 0, 0.011014773776523545, \n 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716], [\n 310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 55.253], [306, 309, 0, 0.035333795013850415, \n 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044], [311,\n 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 19.833], [280, 278, 0, 0.009749769159764544, \n 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, \n 84.47200000000001], [311, 32, 0, 0.01205909510619806, \n 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48], [13,\n 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 50.049], [313, 314, 0, 0.006092624653739613, \n 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191], [312,\n 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 51.635], [547, 566, 0, 0.027035702479338848, \n 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783], [245, \n 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 81.803], [312, 316, 0, 8.803670360110802e-05, \n 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0170000000000001], [312, 314, 0, 0.005339854570637119, \n 
0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 30.843000000000004], [554, 546, 0, 0.08174743801652892, \n 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 123.64299999999999], [262, 216, 0, 0.042641966759002774, \n 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15], [317,\n 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 24.464000000000002], [318, 317, 0, 0.008311634349030471,\n 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006], [\n 231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 203.683], [319, 567, 0, 0.006089586776859504, \n 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421], [557, 321,\n 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 30.264], [277, 65, 0, 0.009430170821779778, 0.7616700793261759,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703], [322, 288, 0, \n 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, \n 1, -360, 56.706], [322, 323, 0, 0.0018503000923372577, 0.14944779312484,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031], [277, 324, 0, \n 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 85.425], [324, 325, 0, 0.01103508771932133, \n 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 47.803999999999995], [277, 325, 0, 0.008665743305609418, \n 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54], [326,\n 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 11.577], [328, 326, 0, 0.10300958677685952, \n 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 77.90100000000001], [328, 327, 0, 0.09827173553719008, \n 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318], [326, \n 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.443999999999996], [568, 329, 0, 0.05699900826446282, \n 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211], [568,\n 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 48.681999999999995], [332, 78, 0, 0.006471029547541551, \n 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065], [333,\n 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 49.559], [332, 333, 0, 0.007504674515235457, \n 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347], [\n 332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 49.456], [66, 334, 0, 0.030625, \n 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445], [330,\n 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0,\n 0, 1, 1, -360, 63.598], [336, 66, 0, 0.015054362880886425, \n 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477], [330, \n 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 112.73700000000001], [68, 70, 0, 0.016314058171745152, \n 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115], [509,\n 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 52.848], [324, 288, 0, 0.012627423822714683, \n 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468], [338,\n 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 27.915], [339, 559, 0, 0.03560595041322315, \n 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927], [339,\n 340, 0, 
0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 131.762], [559, 340, 0, 0.20983272727272728, \n 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686], [341, \n 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, \n 2567.0, 0, 1, 1, -360, 8.083], [557, 342, 0, 0.006019834710743802, \n 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21], [558, \n 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 32.217], [502, 340, 0, 0.021737520661157025, \n 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756], [72, \n 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, \n 0, 2, 1, -360, 78.03399999999999], [344, 345, 0, 0.0005762927054480609,\n 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993], [\n 346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 6.55], [46, 47, 0, 0.0008975069252077563, \n 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184], [346,\n 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 4.169], [347, 328, 0, 0.029905454545454544, \n 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232], [347,\n 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.862], [571, 348, 0, 0.041548429752066116, \n 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842], [347,\n 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 24.279], [571, 570, 0, 0.17379041322314048, \n 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429], [14,\n 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, \n 1, 1, -360, 32.772], [350, 573, 0, 0.026277685950413226, \n 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745], [15, \n 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 76.222], [352, 15, 0, 0.0015260560941828254, \n 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629], [15,\n 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, \n 5134.0, 0, 1, 1, -360, 61.235], [232, 227, 0, 5.5747922437673134e-05, \n 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161], [\n 565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 59.714], [235, 567, 0, 0.02391404958677686, \n 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34], [567, \n 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, \n 1981.0, 0, 1, 1, -360, 48.816], [353, 519, 0, 0.007621818181818182, \n 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 23.055999999999997], [354, 353, 0, 0.0008436363636363636, \n 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552], [355, \n 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.360999999999999], [354, 356, 0, 0.01855404958677686, \n 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.063000000000002], [357, 358, 0, 0.0034823407202216067, \n 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228], [\n 574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.195], [235, 575, 0, 0.007459504132231404, \n 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565], [167, 361,\n 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 1.864], [528, 362, 0, 
0.0011960330578512398, \n 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 3.6180000000000003], [363, 344, 0, 0.0002662742382271468, \n 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538], [\n 259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0,\n 1283.0, 0, 1, 1, -360, 56.618], [54, 56, 0, 0.007723337950138504, \n 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305], [365, \n 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 21.649], [231, 366, 0, 0.0013273891966759002, \n 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 7.667000000000001], [30, 367, 0, 0.01126108033240997, \n 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522], [61, \n 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 58.735], [254, 368, 0, 0.0004297520661157025, \n 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3], [254, \n 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.484], [254, 370, 0, 0.0003669421487603306, \n 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11], [99, \n 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 23.316999999999997], [354, 519, 0, \n 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1,\n -360, 20.457], [571, 371, 0, 0.023726942148760328, 0.06275238397221199,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887], [207, 372, 0, \n 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 3.523], [57, 373, 0, 0.0017725619834710745, \n 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681], [209,\n 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 5.847], [375, 376, 0, 0.0045364727608518006, \n 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652], [\n 376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, \n 1283.0, 1283.0, 0, 1, 1, -360, 13.38], [16, 49, 0, 0.002266101108033241,\n 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178], [318,\n 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 20.599], [378, 297, 0, 0.01753917355371901, \n 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 26.528000000000002], [562, 379, 0, 0.01802314049586777, \n 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26], [576,\n 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 2.735], [576, 381, 0, 0.0034320661157024794, \n 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191], [577, \n 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 90.818], [244, 383, 0, 0.006845567867036011, \n 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655], [\n 244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 116.059], [383, 306, 0, 0.0300685595567867, \n 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838], [380, \n 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 2.958], [252, 225, 0, 0.062094545454545444, \n 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 46.958999999999996], [220, 76, 0, 0.002772074099722992, \n 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023], [542,\n 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0,\n 1, 
1, -360, 12.009], [385, 384, 0, 0.053734876033057856, \n 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637], [542,\n 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 34.201], [386, 385, 0, 0.003668760330578512, \n 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 11.097999999999999], [387, 578, 0, 0.015444628099173553, \n 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72], [332, \n 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 81.07300000000001], [382, 332, 0, \n 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 102.60700000000001], [382, 388, 0, 0.00476159972299169, \n 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503], [579,\n 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 28.905], [577, 387, 0, 0.07597818181818182, \n 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917], [144,\n 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.647], [37, 49, 0, 0.008441481994459835, \n 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758], [391,\n 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 41.042], [392, 310, 0, 0.007035318559556785, \n 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 20.317999999999998], [260, 393, 0, 0.006341412742382271, \n 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314], [394, \n 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 2.1919999999999997], [395, 282, 0, 0.008762984764542936,\n 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615], [395,\n 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 19.665], [25, 396, 0, 0.008809037396121884, \n 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881], [81,\n 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 43.44], [278, 80, 0, 0.016286011080332407, \n 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068], [81, \n 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 121.60799999999999], [569, 570, 0, 0.03253950413223141, \n 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216], [397, \n 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, \n 1200.0, 0, 1, 1, -360, 9.513], [542, 398, 0, 0.0005580165289256199, \n 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 1.6880000000000002], [398, 385, 0, 0.021893553719008262, \n 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 33.114000000000004], [399, 499, 0, 0.03266380165289256, \n 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 24.701999999999998], [83, 399, 0, 0.025700495867768593, \n 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436], [498,\n 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 18.352999999999998], [518, 239, 0, 0.04685289256198347, \n 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865], [575, \n 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 9.168], [401, 360, 0, 0.007957063711911357, \n 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98], [580, \n 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.790999999999999], [401, 402, 0, 
0.0033434903047091418,\n 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656], [403,\n 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 27.701999999999998], [189, 360, 0, 0.028456024930747923, \n 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181], [234, \n 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 12.24], [235, 404, 0, 0.05107504132231405, \n 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251], [235,\n 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.878], [216, 259, 0, 0.0022115650969529088, \n 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 12.774000000000001], [405, 259, 0, 0.0052832409972299165, \n 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516], [\n 405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0,\n 1711.0, 0, 2, 1, -360, 38.323], [406, 230, 0, 8.098164819944598e-05, \n 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871], [\n 542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 38.674], [23, 408, 0, 0.03224528925619835, \n 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771], [577,\n 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 39.321999999999996], [562, 564, 0, 0.06921520661157024, \n 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 104.68799999999999], [582, 507, 0, 0.006357685950413223, \n 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616], [27, \n 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 4.544], [501, 27, 0, 0.003811570247933884, \n 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53], [27, 411,\n 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 7.031000000000001], [411, 410, 0, 0.002054214876033058, \n 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 3.1069999999999998], [403, 360, 0, 0.008191481994459833, \n 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 23.656999999999996], [412, 360, 0, 0.016761772853185596, \n 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408], [326, \n 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 36.533], [414, 413, 0, 0.008093223140495867, \n 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482], [6, \n 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 14.725999999999999], [554, 580, 0, 0.07435371900826447, \n 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46], [262, \n 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 113.53399999999999], [499, 556, 0, 0.04185586776859504, \n 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 63.306999999999995], [224, 229, 0, 0.004135206611570248, \n 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509], [583, \n 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 37.257], [415, 307, 0, 0.015675554016620498, \n 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271], [416, \n 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 3.193], [284, 561, 0, 0.015221487603305786, \n 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045], [543,\n 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 
1981.0, \n 1981.0, 0, 4, 1, -360, 4.002], [418, 506, 0, 0.0009395041322314049, \n 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842], [220, \n 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 26.566999999999997], [295, 419, 0, 0.0012023140495867769,\n 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637], [295, \n 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, \n 0, 1, 1, -360, 2.421], [541, 62, 0, 0.05133355371900827, \n 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821], [52, \n 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 0.802], [60, 160, 0, 6.128808864265928e-05, \n 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177], [535,\n 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.113], [267, 282, 0, 0.0065652700831024926, \n 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921], [52,\n 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 33.164], [28, 27, 0, 0.015726942148760328, \n 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787], [30,\n 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 52.725], [422, 81, 0, 0.0004226685133887349, \n 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324], [119, \n 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 20.673000000000002], [423, 425, 0, \n 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 3.765], [424, 425, 0, 0.005922957063711911, \n 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.211], [\n 426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 42.193999999999996], [427, 428, 0, \n 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 0.8059999999999999], [19, 428, 0, 0.023607603305785128, \n 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413], [45, \n 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 38.755], [44, 429, 0, 5.289256198347107e-05, \n 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08], [505, \n 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 9.094], [231, 431, 0, 0.011677285318559558, \n 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 67.44800000000001], [190, 431, 0, 0.009600761772853185, \n 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.45399999999999], [430, 431, 0, 0.0028100761772853187, \n 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 16.230999999999998], [286, 433, 0, 0.01568694214876033, \n 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453], [432,\n 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 0.304], [506, 433, 0, 0.0065904132231404955, \n 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936], [23, \n 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 39.532], [400, 434, 0, 0.008155371900826446, \n 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335], [500,\n 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, \n 0, 2, 1, -360, 9.587], [32, 436, 0, 0.0044813019390581715, \n 0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884], [\n 435, 436, 0, 
0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 3.832], [78, 436, 0, 0.00897680055401662, \n 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85], [86,\n 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 84.868], [437, 438, 0, 1.0387811634349031e-05, \n 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06], [221,\n 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 13.17], [207, 439, 0, 0.055703801652892564, \n 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 42.126000000000005], [516, 439, 0, 0.05448462809917355, \n 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 41.20399999999999], [513, 439, 0, 0.046726611570247926, \n 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 35.336999999999996], [181, 441, 0, 0.040805289256198356, \n 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718], [440,\n 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.2], [504, 441, 0, 0.05916099173553719, \n 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 89.48100000000001], [135, 442, 0, 0.004956890581717451, \n 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631], [109,\n 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.884], [112, 442, 0, 0.0027304362880886425, \n 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 15.770999999999999], [113, 443, 0, 0.0019885734072022164, \n 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 11.485999999999999], [132, 443, 0, 0.006788434903047091, \n 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21], [\n 107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.129], [444, 445, 0, \n 7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 0.455], [112, 445, 0, 0.002816135734072022, \n 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266], [109,\n 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.291], [119, 447, 0, 0.005212690443213296, \n 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217], [100,\n 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 58.563], [446, 447, 0, 2.9518698060941832e-05, \n 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341], [\n 124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.376], [125, 448, 0, 0.00615148891966759, \n 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531], [\n 131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.226], [449, 450, 0, \n 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 13.64], [173, 450, 0, 0.002862361495844876, \n 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533], [\n 184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 23.236], [144, 451, 0, 0.007672727272727273, \n 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605], [140,\n 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.574000000000002], [514, 451, 0, 0.01149289256198347, \n 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383], [537,\n 585, 0, 
0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 76.723], [141, 585, 0, 0.007994710743801653, \n 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092], [584, \n 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.14], [522, 454, 0, 0.0035008264462809916, \n 0.0092588924438956, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295], [144, \n 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.85], [453, 454, 0, 0.001114710743801653, \n 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686], [199, \n 456, 0, 0.013063140495867768, 0.0086372614214612, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.879], [140, 456, 0, 0.005061818181818182, \n 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 7.656000000000001], [455, 456, 0, 0.0011365289256198346, \n 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719], [537, \n 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 29.538], [538, 457, 0, 0.027927272727272728, \n 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12], [153, \n 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 22.758000000000003], [176, 457, 0, 0.004579173553719009, \n 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463], [524,\n 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.532], [458, 459, 0, 0.001993388429752066, \n 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015], [134, \n 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 17.868], [460, 461, 0, 6.611570247933885e-05, \n 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1], [150, \n 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 12.128], [149, 461, 0, 0.005586115702479339, \n 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449], [521, \n 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.850999999999999], [462, 463, 0, 0.007197355371900825,\n 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443], [538,\n 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.235], [110, 464, 0, 0.0025753116343490306, \n 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875], [90,\n 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 42.332], [165, 464, 0, 0.002152527700831025, \n 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.433], [\n 458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 3.03], [134, 465, 0, 0.011838677685950413, \n 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906], [524, \n 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.494], [466, 467, 0, 0.0023509349030470914, \n 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579], [110,\n 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.635], [165, 467, 0, 0.0022891274238227145, \n 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 13.222000000000001], [468, 469, 0, 0.0005269421487603305, \n 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797], [541, \n 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 33.866], [490, 469, 0, 
0.028243305785123966, \n 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718], [263,\n 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 28.125], [470, 471, 0, 0.001570909090909091, \n 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188], [534,\n 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 18.526], [136, 472, 0, 0.0007079293628808865, \n 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 4.0889999999999995], [110, 472, 0, 0.00019511772853185596, \n 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127], [\n 251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.243], [226, 474, 0, 0.017639669421487602, \n 0.011663231841509601, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34], [473,\n 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0,\n 0, 2, 1, -360, 5.244], [257, 474, 0, 0.020264462809917356, \n 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65], [6, \n 474, 0, 0.08066247933884299, 0.05333349367016, 248.0, 248.0, 248.0, 0, \n 1, 1, -360, 61.001000000000005], [299, 475, 0, 0.013238227146814403, \n 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464], [3,\n 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 1.614], [210, 475, 0, 0.0001481994459833795, \n 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856], [\n 297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 29.267], [296, 476, 0, 0.005596694214876033, \n 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465], [295, \n 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 1.433], [313, 478, 0, 0.008696849030470914, \n 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 50.233000000000004], [477, 478, 0, 1.5235457063711912e-05, \n 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 0.08800000000000001], [245, 478, 0, 0.005264542936288089, \n 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408], [479,\n 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.986000000000004], [565, 481, 0, 0.024842314049586776,\n 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574], [480, \n 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.11699999999999999], [415, 482, 0, \n 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 31.831], [56, 482, 0, 0.002630886426592798, 0.0236105947261788, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598], [409, 482, 0, \n 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 2.205], [483, 484, 0, 9.037396121883656e-05, \n 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261], [3, \n 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 28.944000000000003], [301, 484, 0, 0.00966516620498615, \n 0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913], [233,\n 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 40.726], [392, 485, 0, 0.00914819944598338, \n 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42], [391, \n 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 0.24600000000000002], [579, 488, 0, \n 0.004636473829194215, 0.11036180126571601, 
1486.0, 1486.0, 1486.0, 0, 1,\n 1, -360, 21.038], [486, 488, 0, 0.00016969696969690082, \n 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77], [487,\n 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, \n 1486.0, 0, 1, 1, -360, 0.6609999999999999], [270, 489, 0, \n 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 1.008], [331, 489, 0, 0.003002943213296399, \n 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345], [\n 396, 489, 0, 0.01124792243767313, 0.40377286606072005, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 64.968], [519, 253, 0, 0.013353485337561985, \n 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 40.394293146100004], [382, 349, 0, 0.009091647380263157, \n 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 105.02671053600001], [349, 351, 0, 0.0005858117819605263, \n 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 6.76729770521], [459, 465, 0, 1.578788789911157e-05, \n 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.047758360894800005], [549, 550, 0, 3.680432518409091e-05, \n 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.111333083682], [550, 551, 0, 5.755645674710744e-05, \n 0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.17410828165999997], [194, 195, 0, 1.7560672583171745e-05, \n 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.202860889681], [247, 248, 0, 2.1755213937811637e-05, \n 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.25131623141], [2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991], [549, 551, 0, \n 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 0.28029073853799996], [54, 365, 0, 2.573045189134349e-05, \n 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.297238180249], [131, 265, 0, 2.7616389041343487e-05, \n 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.319024526206], [91, 92, 0, 2.8945628197853184e-05, \n 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.33437989694200004], [247, 249, 0, 3.098840072160664e-05, \n 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.357978005136], [186, 191, 0, 3.1591661821191135e-05, \n 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.36494687735799997], [129, 173, 0, 3.202671277479225e-05, \n 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.369972585975], [96, 202, 0, 3.5971247867797784e-05, \n 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.415539855369], [53, 320, 0, 3.784209581142659e-05, \n 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.437151890814], [24, 396, 0, 4.144748602818559e-05, \n 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.47880135859800005], [133, 156, 0, 4.431754564044322e-05, \n 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.511956287238], [442, 452, 0, 4.483572190450138e-05, \n 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.517942259441], [445, 452, 0, 4.490753296371191e-05, \n 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.518771820797], [247, 250, 0, 4.594910768732687e-05, \n 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.530804092004], [187, 195, 0, 4.755760376239612e-05, \n 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.549385438663], [216, 236, 0, 5.03353075283241e-05, \n 
0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.581473472567], [244, 389, 0, 5.1633313019736845e-05, \n 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.596468032004], [394, 406, 0, 5.6346419007686985e-05, \n 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.650913832377], [442, 445, 0, 6.388070648310249e-05, \n 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.737949921293], [442, 444, 0, 6.584378362735456e-05, \n 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.760627388463], [198, 472, 0, 8.37554210498615e-05, 0.0120264578966664,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967], [464, 467, 0, \n 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, \n 1, 1, -360, 0.977332411594], [198, 251, 0, 8.83613182396122e-05, \n 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0207499483], [112, 143, 0, 9.049653833033241e-05, \n 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.04541601079], [2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, \n 1, -360, 360], [10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 
360], [117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [128, 529, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [138, 531, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [259, 552, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [270, 556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [336, 571, 0, 0.005, 
0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [383, 578, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360]]'], {}), '([[586, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [589, \n 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [590, 108, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [593, 112, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [594, 114, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [595, 115, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [601, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [602,\n 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [603, 526, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [607, 127, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [608, 127, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [609, 529, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [613, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [614, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [616,\n 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [617, 133, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [618, 133, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [619, 134, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [621, 136, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [628, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [629, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [631,\n 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [632, 145, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [637, 148, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [638, 149, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [639, 150, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [642, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [643, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [646,\n 536, 0, 1e-05, 0, 
[data block: several hundred branch records in 13-column MATPOWER/PYPOWER-style rows (from_bus, to_bus, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax); many rows carry placeholder ratings of 9999 and angle limits of -360/360]
-360, \n 1.3780000000000001], [510, 252, 0, 0.08467702479338843, \n 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 64.03699999999999], [529, 253, 0, 0.04859504132231405, \n 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5], [237, \n 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 50.055], [254, 238, 0, 0.07815008264462811, \n 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 59.101000000000006], [69, 255, 0, 0.0009369806094182826, \n 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 10.824000000000002], [510, 225, 0, 0.021953719008264466, \n 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41], [256, \n 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 15.315], [258, 190, 0, 0.011717451523545707, \n 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84], [258, \n 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 45.58], [260, 261, 0, 0.006791031855955679, \n 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45], [554,\n 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 132.974], [515, 263, 0, 0.006987107438016529, \n 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136], [14, \n 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, \n 1, 1, -360, 51.446000000000005], [116, 555, 0, 0.0009768595041322315, \n 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955], [151, \n 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 10.958], [111, 114, 0, 0.008806613573407202, \n 0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867], [77,\n 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 33.321999999999996], [266, 525, 0, \n 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 15.774000000000001], [267, 120, 0, 0.013136945983379503, \n 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 75.87899999999999], [268, 269, 0, 0.0010327272727272726, \n 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 1.5619999999999998], [556, 271, 0, 0.052289586776859506, \n 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 39.544000000000004], [556, 272, 0, 0.04685355371900827, \n 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433], [529,\n 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 5.234], [128, 274, 0, 0.0029350761772853184, \n 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953], [34,\n 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 0.627], [503, 276, 0, 0.006707438016528925, \n 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29], [503, 504,\n 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1,\n -360, 194.59], [177, 218, 0, 0.04330380165289256, 0.114528740018308, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497], [277, 278, 0, \n 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 83.072], [557, 558, 0, 0.04341289256198347, 0.258338836678648,\n 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493], [557, 559, 0, \n 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 51.665], [559, 558, 0, 0.04474314049586777, 0.11833546501370001, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999], [277, 78, 0, \n 
0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 103.557], [277, 279, 0, 0.021390927977839334, 0.191970480441328, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777], [78, 279, 0, \n 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 45.665], [281, 282, 0, 0.0023178670360110803, 0.08320574945862161,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388], [283, 161, 0, \n 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1,\n -360, 55.571000000000005], [268, 161, 0, 0.018883636363636366, \n 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 57.123000000000005], [256, 284, 0, 0.010755371900826446, \n 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535], [515, \n 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 61.576], [263, 516, 0, 0.0030355371900826445, \n 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365], [516,\n 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 10.449000000000002], [63, 286, 0, 0.019088925619834708, \n 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872], [287, \n 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 13.105], [8, 102, 0, 0.015100069252077563, \n 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 87.21799999999999], [8, 101, 0, 0.019246883656509697, 0.69091598202144,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17], [80, 288, 0, \n 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 46.11600000000001], [80, 289, 0, 0.0003782317636201524, \n 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, \n 6.553999999999999], [276, 560, 0, 0.01778314049586777, \n 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897], [37,\n 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, \n 2567.0, 0, 2, 1, -360, 48.773999999999994], [290, 74, 0, \n 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1,\n -360, 179.483], [512, 291, 0, 0.0053299173553719, 0.056385693247479204,\n 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123], [78, 292, 0, \n 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2,\n 1, -360, 50.381], [199, 548, 0, 0.0015530578512396695, \n 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349], [491, \n 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.720999999999998], [4, 294, 0, 9.669321329639889e-05, \n 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117], [\n 490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 76.503], [491, 295, 0, 0.010613553719008264, \n 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053], [491,\n 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.656000000000001], [295, 297, 0, 0.020297520661157024, \n 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7], [508, \n 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 35.15], [117, 123, 0, 0.005876211911357341, \n 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941], [133,\n 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 12.907], [71, 74, 0, 0.03904524469065097, \n 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144], [\n 74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, \n 
3423.0, 0, 2, 1, -360, 89.09200000000001], [298, 515, 0, \n 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 32.823], [5, 299, 0, 0.0016232686980609415, 0.058271370400665996,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376], [32, 292, 0, \n 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 55.908], [5, 29, 0, 0.00743395083102493, 1.0674425076571843, \n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001], [503, 560, 0,\n 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, \n -360, 45.8], [300, 301, 0, 0.004892053324099723, 0.7024509290644521, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005], [51, 300, 0,\n 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 29.729], [244, 302, 0, 0.007714508310249307, 1.107727813004004,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118], [31, 302, 0, \n 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 50.472], [51, 282, 0, 0.006288434903047093, 0.9029576432132521,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999], [303, 304, 0,\n 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 0.254], [305, 304, 0, 0.003881117266849031, 0.0783689646873844,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813], [305, 259, 0, 0.0025625,\n 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 29.601999999999997], [306, 307, 0, 0.03223268698060942, \n 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088], [305, \n 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 7.01], [305, 309, 0, 0.011014773776523545, \n 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716], [\n 310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 55.253], [306, 309, 0, 0.035333795013850415, \n 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044], [311,\n 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 19.833], [280, 278, 0, 0.009749769159764544, \n 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, \n 84.47200000000001], [311, 32, 0, 0.01205909510619806, \n 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48], [13,\n 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 50.049], [313, 314, 0, 0.006092624653739613, \n 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191], [312,\n 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 51.635], [547, 566, 0, 0.027035702479338848, \n 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783], [245, \n 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 81.803], [312, 316, 0, 8.803670360110802e-05, \n 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0170000000000001], [312, 314, 0, 0.005339854570637119, \n 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 30.843000000000004], [554, 546, 0, 0.08174743801652892, \n 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 123.64299999999999], [262, 216, 0, 0.042641966759002774, \n 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15], [317,\n 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 24.464000000000002], [318, 317, 0, 0.008311634349030471,\n 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006], 
[\n 231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 203.683], [319, 567, 0, 0.006089586776859504, \n 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421], [557, 321,\n 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 30.264], [277, 65, 0, 0.009430170821779778, 0.7616700793261759,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703], [322, 288, 0, \n 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, \n 1, -360, 56.706], [322, 323, 0, 0.0018503000923372577, 0.14944779312484,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031], [277, 324, 0, \n 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 85.425], [324, 325, 0, 0.01103508771932133, \n 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 47.803999999999995], [277, 325, 0, 0.008665743305609418, \n 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54], [326,\n 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 11.577], [328, 326, 0, 0.10300958677685952, \n 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 77.90100000000001], [328, 327, 0, 0.09827173553719008, \n 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318], [326, \n 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.443999999999996], [568, 329, 0, 0.05699900826446282, \n 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211], [568,\n 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 48.681999999999995], [332, 78, 0, 0.006471029547541551, \n 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065], [333,\n 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 49.559], [332, 333, 0, 0.007504674515235457, \n 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347], [\n 332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 49.456], [66, 334, 0, 0.030625, \n 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445], [330,\n 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0,\n 0, 1, 1, -360, 63.598], [336, 66, 0, 0.015054362880886425, \n 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477], [330, \n 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 112.73700000000001], [68, 70, 0, 0.016314058171745152, \n 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115], [509,\n 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 52.848], [324, 288, 0, 0.012627423822714683, \n 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468], [338,\n 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 27.915], [339, 559, 0, 0.03560595041322315, \n 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927], [339,\n 340, 0, 0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 131.762], [559, 340, 0, 0.20983272727272728, \n 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686], [341, \n 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, \n 2567.0, 0, 1, 1, -360, 8.083], [557, 342, 0, 0.006019834710743802, \n 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21], [558, \n 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 32.217], [502, 340, 0, 
0.021737520661157025, \n 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756], [72, \n 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, \n 0, 2, 1, -360, 78.03399999999999], [344, 345, 0, 0.0005762927054480609,\n 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993], [\n 346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 6.55], [46, 47, 0, 0.0008975069252077563, \n 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184], [346,\n 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 4.169], [347, 328, 0, 0.029905454545454544, \n 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232], [347,\n 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.862], [571, 348, 0, 0.041548429752066116, \n 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842], [347,\n 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 24.279], [571, 570, 0, 0.17379041322314048, \n 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429], [14,\n 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, \n 1, 1, -360, 32.772], [350, 573, 0, 0.026277685950413226, \n 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745], [15, \n 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 76.222], [352, 15, 0, 0.0015260560941828254, \n 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629], [15,\n 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, \n 5134.0, 0, 1, 1, -360, 61.235], [232, 227, 0, 5.5747922437673134e-05, \n 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161], [\n 565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 59.714], [235, 567, 0, 0.02391404958677686, \n 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34], [567, \n 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, \n 1981.0, 0, 1, 1, -360, 48.816], [353, 519, 0, 0.007621818181818182, \n 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 23.055999999999997], [354, 353, 0, 0.0008436363636363636, \n 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552], [355, \n 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.360999999999999], [354, 356, 0, 0.01855404958677686, \n 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.063000000000002], [357, 358, 0, 0.0034823407202216067, \n 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228], [\n 574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.195], [235, 575, 0, 0.007459504132231404, \n 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565], [167, 361,\n 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 1.864], [528, 362, 0, 0.0011960330578512398, \n 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 3.6180000000000003], [363, 344, 0, 0.0002662742382271468, \n 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538], [\n 259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0,\n 1283.0, 0, 1, 1, -360, 56.618], [54, 56, 0, 0.007723337950138504, \n 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305], [365, \n 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 21.649], 
[231, 366, 0, 0.0013273891966759002, \n 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 7.667000000000001], [30, 367, 0, 0.01126108033240997, \n 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522], [61, \n 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 58.735], [254, 368, 0, 0.0004297520661157025, \n 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3], [254, \n 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.484], [254, 370, 0, 0.0003669421487603306, \n 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11], [99, \n 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 23.316999999999997], [354, 519, 0, \n 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1,\n -360, 20.457], [571, 371, 0, 0.023726942148760328, 0.06275238397221199,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887], [207, 372, 0, \n 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 3.523], [57, 373, 0, 0.0017725619834710745, \n 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681], [209,\n 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 5.847], [375, 376, 0, 0.0045364727608518006, \n 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652], [\n 376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, \n 1283.0, 1283.0, 0, 1, 1, -360, 13.38], [16, 49, 0, 0.002266101108033241,\n 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178], [318,\n 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 20.599], [378, 297, 0, 0.01753917355371901, \n 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 26.528000000000002], [562, 379, 0, 0.01802314049586777, \n 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26], [576,\n 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 2.735], [576, 381, 0, 0.0034320661157024794, \n 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191], [577, \n 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 90.818], [244, 383, 0, 0.006845567867036011, \n 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655], [\n 244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 116.059], [383, 306, 0, 0.0300685595567867, \n 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838], [380, \n 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 2.958], [252, 225, 0, 0.062094545454545444, \n 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 46.958999999999996], [220, 76, 0, 0.002772074099722992, \n 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023], [542,\n 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 12.009], [385, 384, 0, 0.053734876033057856, \n 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637], [542,\n 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 34.201], [386, 385, 0, 0.003668760330578512, \n 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 11.097999999999999], [387, 578, 0, 0.015444628099173553, \n 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72], [332, \n 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, \n 1711.0, 0, 1, 
1, -360, 81.07300000000001], [382, 332, 0, \n 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 102.60700000000001], [382, 388, 0, 0.00476159972299169, \n 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503], [579,\n 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 28.905], [577, 387, 0, 0.07597818181818182, \n 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917], [144,\n 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.647], [37, 49, 0, 0.008441481994459835, \n 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758], [391,\n 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 41.042], [392, 310, 0, 0.007035318559556785, \n 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 20.317999999999998], [260, 393, 0, 0.006341412742382271, \n 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314], [394, \n 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 2.1919999999999997], [395, 282, 0, 0.008762984764542936,\n 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615], [395,\n 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 19.665], [25, 396, 0, 0.008809037396121884, \n 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881], [81,\n 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 43.44], [278, 80, 0, 0.016286011080332407, \n 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068], [81, \n 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 121.60799999999999], [569, 570, 0, 0.03253950413223141, \n 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216], [397, \n 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, \n 1200.0, 0, 1, 1, -360, 9.513], [542, 398, 0, 0.0005580165289256199, \n 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 1.6880000000000002], [398, 385, 0, 0.021893553719008262, \n 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 33.114000000000004], [399, 499, 0, 0.03266380165289256, \n 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 24.701999999999998], [83, 399, 0, 0.025700495867768593, \n 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436], [498,\n 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 18.352999999999998], [518, 239, 0, 0.04685289256198347, \n 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865], [575, \n 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 9.168], [401, 360, 0, 0.007957063711911357, \n 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98], [580, \n 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.790999999999999], [401, 402, 0, 0.0033434903047091418,\n 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656], [403,\n 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 27.701999999999998], [189, 360, 0, 0.028456024930747923, \n 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181], [234, \n 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 12.24], [235, 404, 0, 0.05107504132231405, \n 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251], [235,\n 580, 0, 
0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.878], [216, 259, 0, 0.0022115650969529088, \n 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 12.774000000000001], [405, 259, 0, 0.0052832409972299165, \n 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516], [\n 405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0,\n 1711.0, 0, 2, 1, -360, 38.323], [406, 230, 0, 8.098164819944598e-05, \n 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871], [\n 542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 38.674], [23, 408, 0, 0.03224528925619835, \n 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771], [577,\n 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 39.321999999999996], [562, 564, 0, 0.06921520661157024, \n 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 104.68799999999999], [582, 507, 0, 0.006357685950413223, \n 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616], [27, \n 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 4.544], [501, 27, 0, 0.003811570247933884, \n 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53], [27, 411,\n 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 7.031000000000001], [411, 410, 0, 0.002054214876033058, \n 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 3.1069999999999998], [403, 360, 0, 0.008191481994459833, \n 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 23.656999999999996], [412, 360, 0, 0.016761772853185596, \n 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408], [326, \n 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 36.533], [414, 413, 0, 0.008093223140495867, \n 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482], [6, \n 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 14.725999999999999], [554, 580, 0, 0.07435371900826447, \n 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46], [262, \n 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 113.53399999999999], [499, 556, 0, 0.04185586776859504, \n 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 63.306999999999995], [224, 229, 0, 0.004135206611570248, \n 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509], [583, \n 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 37.257], [415, 307, 0, 0.015675554016620498, \n 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271], [416, \n 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 3.193], [284, 561, 0, 0.015221487603305786, \n 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045], [543,\n 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, \n 1981.0, 0, 4, 1, -360, 4.002], [418, 506, 0, 0.0009395041322314049, \n 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842], [220, \n 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 26.566999999999997], [295, 419, 0, 0.0012023140495867769,\n 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637], [295, \n 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, \n 0, 1, 1, -360, 2.421], [541, 62, 0, 0.05133355371900827, \n 0.0339414035471236, 
248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821], [52, \n 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 0.802], [60, 160, 0, 6.128808864265928e-05, \n 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177], [535,\n 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.113], [267, 282, 0, 0.0065652700831024926, \n 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921], [52,\n 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 33.164], [28, 27, 0, 0.015726942148760328, \n 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787], [30,\n 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 52.725], [422, 81, 0, 0.0004226685133887349, \n 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324], [119, \n 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 20.673000000000002], [423, 425, 0, \n 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 3.765], [424, 425, 0, 0.005922957063711911, \n 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.211], [\n 426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 42.193999999999996], [427, 428, 0, \n 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 0.8059999999999999], [19, 428, 0, 0.023607603305785128, \n 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413], [45, \n 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 38.755], [44, 429, 0, 5.289256198347107e-05, \n 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08], [505, \n 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 9.094], [231, 431, 0, 0.011677285318559558, \n 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 67.44800000000001], [190, 431, 0, 0.009600761772853185, \n 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.45399999999999], [430, 431, 0, 0.0028100761772853187, \n 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 16.230999999999998], [286, 433, 0, 0.01568694214876033, \n 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453], [432,\n 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 0.304], [506, 433, 0, 0.0065904132231404955, \n 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936], [23, \n 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 39.532], [400, 434, 0, 0.008155371900826446, \n 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335], [500,\n 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, \n 0, 2, 1, -360, 9.587], [32, 436, 0, 0.0044813019390581715, \n 0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884], [\n 435, 436, 0, 0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 3.832], [78, 436, 0, 0.00897680055401662, \n 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85], [86,\n 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 84.868], [437, 438, 0, 1.0387811634349031e-05, \n 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06], [221,\n 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 13.17], [207, 439, 0, 
0.055703801652892564, \n 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 42.126000000000005], [516, 439, 0, 0.05448462809917355, \n 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 41.20399999999999], [513, 439, 0, 0.046726611570247926, \n 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 35.336999999999996], [181, 441, 0, 0.040805289256198356, \n 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718], [440,\n 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.2], [504, 441, 0, 0.05916099173553719, \n 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 89.48100000000001], [135, 442, 0, 0.004956890581717451, \n 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631], [109,\n 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.884], [112, 442, 0, 0.0027304362880886425, \n 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 15.770999999999999], [113, 443, 0, 0.0019885734072022164, \n 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 11.485999999999999], [132, 443, 0, 0.006788434903047091, \n 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21], [\n 107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.129], [444, 445, 0, \n 7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 0.455], [112, 445, 0, 0.002816135734072022, \n 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266], [109,\n 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.291], [119, 447, 0, 0.005212690443213296, \n 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217], [100,\n 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 58.563], [446, 447, 0, 2.9518698060941832e-05, \n 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341], [\n 124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.376], [125, 448, 0, 0.00615148891966759, \n 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531], [\n 131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.226], [449, 450, 0, \n 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 13.64], [173, 450, 0, 0.002862361495844876, \n 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533], [\n 184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 23.236], [144, 451, 0, 0.007672727272727273, \n 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605], [140,\n 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.574000000000002], [514, 451, 0, 0.01149289256198347, \n 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383], [537,\n 585, 0, 0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 76.723], [141, 585, 0, 0.007994710743801653, \n 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092], [584, \n 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.14], [522, 454, 0, 0.0035008264462809916, \n 0.0092588924438956, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295], [144, \n 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.85], [453, 454, 0, 0.001114710743801653, 
\n 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686], [199, \n 456, 0, 0.013063140495867768, 0.0086372614214612, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.879], [140, 456, 0, 0.005061818181818182, \n 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 7.656000000000001], [455, 456, 0, 0.0011365289256198346, \n 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719], [537, \n 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 29.538], [538, 457, 0, 0.027927272727272728, \n 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12], [153, \n 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 22.758000000000003], [176, 457, 0, 0.004579173553719009, \n 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463], [524,\n 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.532], [458, 459, 0, 0.001993388429752066, \n 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015], [134, \n 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 17.868], [460, 461, 0, 6.611570247933885e-05, \n 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1], [150, \n 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 12.128], [149, 461, 0, 0.005586115702479339, \n 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449], [521, \n 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.850999999999999], [462, 463, 0, 0.007197355371900825,\n 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443], [538,\n 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.235], [110, 464, 0, 0.0025753116343490306, \n 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875], [90,\n 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 42.332], [165, 464, 0, 0.002152527700831025, \n 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.433], [\n 458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 3.03], [134, 465, 0, 0.011838677685950413, \n 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906], [524, \n 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.494], [466, 467, 0, 0.0023509349030470914, \n 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579], [110,\n 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.635], [165, 467, 0, 0.0022891274238227145, \n 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 13.222000000000001], [468, 469, 0, 0.0005269421487603305, \n 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797], [541, \n 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 33.866], [490, 469, 0, 0.028243305785123966, \n 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718], [263,\n 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 28.125], [470, 471, 0, 0.001570909090909091, \n 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188], [534,\n 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 18.526], [136, 472, 0, 0.0007079293628808865, \n 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 4.0889999999999995], [110, 472, 0, 
0.00019511772853185596, \n 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127], [\n 251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.243], [226, 474, 0, 0.017639669421487602, \n 0.011663231841509601, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34], [473,\n 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0,\n 0, 2, 1, -360, 5.244], [257, 474, 0, 0.020264462809917356, \n 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65], [6, \n 474, 0, 0.08066247933884299, 0.05333349367016, 248.0, 248.0, 248.0, 0, \n 1, 1, -360, 61.001000000000005], [299, 475, 0, 0.013238227146814403, \n 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464], [3,\n 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 1.614], [210, 475, 0, 0.0001481994459833795, \n 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856], [\n 297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 29.267], [296, 476, 0, 0.005596694214876033, \n 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465], [295, \n 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 1.433], [313, 478, 0, 0.008696849030470914, \n 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 50.233000000000004], [477, 478, 0, 1.5235457063711912e-05, \n 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 0.08800000000000001], [245, 478, 0, 0.005264542936288089, \n 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408], [479,\n 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.986000000000004], [565, 481, 0, 0.024842314049586776,\n 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574], [480, \n 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.11699999999999999], [415, 482, 0, \n 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 31.831], [56, 482, 0, 0.002630886426592798, 0.0236105947261788, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598], [409, 482, 0, \n 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 2.205], [483, 484, 0, 9.037396121883656e-05, \n 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261], [3, \n 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 28.944000000000003], [301, 484, 0, 0.00966516620498615, \n 0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913], [233,\n 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 40.726], [392, 485, 0, 0.00914819944598338, \n 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42], [391, \n 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 0.24600000000000002], [579, 488, 0, \n 0.004636473829194215, 0.11036180126571601, 1486.0, 1486.0, 1486.0, 0, 1,\n 1, -360, 21.038], [486, 488, 0, 0.00016969696969690082, \n 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77], [487,\n 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, \n 1486.0, 0, 1, 1, -360, 0.6609999999999999], [270, 489, 0, \n 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 1.008], [331, 489, 0, 0.003002943213296399, \n 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345], [\n 396, 489, 0, 0.01124792243767313, 
0.40377286606072005, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 64.968], [519, 253, 0, 0.013353485337561985, \n 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 40.394293146100004], [382, 349, 0, 0.009091647380263157, \n 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 105.02671053600001], [349, 351, 0, 0.0005858117819605263, \n 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 6.76729770521], [459, 465, 0, 1.578788789911157e-05, \n 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.047758360894800005], [549, 550, 0, 3.680432518409091e-05, \n 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.111333083682], [550, 551, 0, 5.755645674710744e-05, \n 0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.17410828165999997], [194, 195, 0, 1.7560672583171745e-05, \n 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.202860889681], [247, 248, 0, 2.1755213937811637e-05, \n 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.25131623141], [2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991], [549, 551, 0, \n 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 0.28029073853799996], [54, 365, 0, 2.573045189134349e-05, \n 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.297238180249], [131, 265, 0, 2.7616389041343487e-05, \n 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.319024526206], [91, 92, 0, 2.8945628197853184e-05, \n 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.33437989694200004], [247, 249, 0, 3.098840072160664e-05, \n 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.357978005136], [186, 191, 0, 3.1591661821191135e-05, \n 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.36494687735799997], [129, 173, 0, 3.202671277479225e-05, \n 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.369972585975], [96, 202, 0, 3.5971247867797784e-05, \n 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.415539855369], [53, 320, 0, 3.784209581142659e-05, \n 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.437151890814], [24, 396, 0, 4.144748602818559e-05, \n 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.47880135859800005], [133, 156, 0, 4.431754564044322e-05, \n 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.511956287238], [442, 452, 0, 4.483572190450138e-05, \n 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.517942259441], [445, 452, 0, 4.490753296371191e-05, \n 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.518771820797], [247, 250, 0, 4.594910768732687e-05, \n 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.530804092004], [187, 195, 0, 4.755760376239612e-05, \n 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.549385438663], [216, 236, 0, 5.03353075283241e-05, \n 0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.581473472567], [244, 389, 0, 5.1633313019736845e-05, \n 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.596468032004], [394, 406, 0, 5.6346419007686985e-05, \n 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.650913832377], [442, 445, 0, 6.388070648310249e-05, \n 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.737949921293], [442, 444, 0, 6.584378362735456e-05, \n 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, 
-360, \n 0.760627388463], [198, 472, 0, 8.37554210498615e-05, 0.0120264578966664,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967], [464, 467, 0, \n 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, \n 1, 1, -360, 0.977332411594], [198, 251, 0, 8.83613182396122e-05, \n 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0207499483], [112, 143, 0, 9.049653833033241e-05, \n 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.04541601079], [2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, \n 1, -360, 360], [10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [128, 529, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [138, 531, 
0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [259, 552, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [270, 556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [336, 571, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [383, 578, 0, 0.005, 0.0, 2000.0, 
2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360]])\n', (162058, 292905), False, 'from numpy import array\n'), ((307792, 354704), 'numpy.array', 'array', (['[[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0], [589, 1, \n 0.010042676909098597, 0.5021338454549299, 0, 0, 0], [590, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [593, 1, \n 0.0017666198683200384, 0.08833099341600192, 0, 0, 0], [594, 1, \n 0.006047887837492023, 0.30239439187460115, 0, 0, 0], [595, 1, \n 1.50560576164933, 75.2802880824665, 0, 0, 0], [597, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [598, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [599, 1, \n 0.0029602819415092537, 0.1480140970754627, 0, 0, 0], [601, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [602, 1, \n 0.007830423200121252, 0.39152116000606263, 0, 0, 0], [603, 1, \n 1.0997606567649967, 54.98803283824984, 0, 0, 0], [607, 1, \n 0.5729577951308232, 28.64788975654116, 0, 0, 0], [608, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [609, 1, \n 0.0057932399285449895, 0.2896619964272495, 0, 0, 0], [610, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [612, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [613, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [614, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [616, 1, \n 0.0046154933496649645, 0.23077466748324824, 0, 0, 0], [617, 1, \n 0.04360845440717932, 2.1804227203589663, 0, 0, 0], [618, 1, \n 0.010631550198538607, 0.5315775099269304, 0, 0, 0], [619, 1, \n 0.037560566569687294, 1.8780283284843649, 0, 0, 0], [621, 1, \n 0.24350706293059987, 12.175353146529993, 0, 0, 0], [623, 1, \n 0.2419155134996809, 12.095775674984045, 0, 0, 0], [624, 1, \n 0.004297183463481174, 0.21485917317405873, 0, 0, 0], [628, 1, \n 0.14292113889652203, 7.1460569448261015, 0, 0, 0], [629, 1, \n 0.023968734429639437, 1.198436721481972, 0, 0, 0], [631, 1, \n 0.025401128917466494, 1.2700564458733248, 0, 0, 0], [632, 1, \n 0.01435577586688896, 0.717788793344448, 0, 0, 0], [637, 1, \n 0.017093240888069558, 0.854662044403478, 0, 0, 0], [638, 1, \n 0.02048324117592693, 1.0241620587963465, 0, 0, 0], [639, 1, \n 0.005029296201703893, 0.25146481008519467, 0, 0, 0], [640, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [641, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [642, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [643, 1, \n 0.27279157245950864, 13.639578622975431, 0, 0, 0], [646, 1, \n 0.03278591827693044, 1.6392959138465222, 0, 0, 0], [647, 1, \n 0.00445633840657307, 0.2228169203286535, 0, 0, 0], [650, 1, \n 0.4216014442504307, 21.080072212521536, 0, 0, 0], [652, 1, \n 0.00746436683100989, 0.37321834155049455, 0, 0, 0], [655, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [657, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [658, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [661, 1, \n 0.010408733278209955, 0.5204366639104978, 0, 0, 0], [662, 1, \n 
0.002928450952890874, 0.1464225476445437, 0, 0, 0], [663, 1, \n 0.00238732414637843, 0.1193662073189215, 0, 0, 0], [666, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [668, 1, \n 0.24382537281678363, 12.191268640839182, 0, 0, 0], [670, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [672, 1, \n 0.010536057232683471, 0.5268028616341736, 0, 0, 0], [676, 1, \n 0.11777465788800255, 5.888732894400127, 0, 0, 0], [677, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [678, 1, \n 0.3237211542489151, 16.186057712445756, 0, 0, 0], [679, 1, \n 0.2212253708977345, 11.061268544886726, 0, 0, 0], [681, 1, \n 0.0063821132179850025, 0.31910566089925013, 0, 0, 0], [683, 1, \n 0.008753521870054244, 0.4376760935027122, 0, 0, 0], [687, 1, \n 0.42303383873825773, 21.151691936912886, 0, 0, 0], [689, 1, \n 0.09867606471697511, 4.933803235848756, 0, 0, 0], [691, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [693, 1, \n 0.06175211791965539, 3.0876058959827692, 0, 0, 0], [694, 1, \n 0.005220282133414166, 0.2610141066707083, 0, 0, 0], [695, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [696, 1, \n 0.22950142793851305, 11.475071396925653, 0, 0, 0], [697, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [698, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [699, 1, \n 0.033295214094824506, 1.6647607047412254, 0, 0, 0], [700, 1, \n 0.008594366926962348, 0.42971834634811745, 0, 0, 0], [701, 1, \n 0.015024226627874922, 0.7512113313937461, 0, 0, 0], [702, 1, \n 0.023363945645890238, 1.168197282294512, 0, 0, 0], [704, 1, \n 0.16170142218136566, 8.085071109068283, 0, 0, 0], [705, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [707, 1, \n 0.010822536130248884, 0.5411268065124443, 0, 0, 0], [708, 1, \n 0.0024828171122335675, 0.12414085561167837, 0, 0, 0], [711, 1, \n 0.056054370956965534, 2.802718547848277, 0, 0, 0], [713, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [714, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [716, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [717, 1, \n 0.0017507043740108488, 0.08753521870054244, 0, 0, 0], [719, 1, \n 0.623250757147862, 31.162537857393104, 0, 0, 0], [721, 1, \n 0.0012732395447351628, 0.06366197723675814, 0, 0, 0], [722, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [723, 1, \n 0.006270704757820675, 0.31353523789103377, 0, 0, 0], [724, 1, \n 0.0019257748114119334, 0.09628874057059668, 0, 0, 0], [725, 1, \n 0.25464790894703254, 12.732395447351628, 0, 0, 0], [726, 1, \n 0.040107045659157625, 2.0053522829578814, 0, 0, 0], [727, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [728, 1, \n 0.16233804195373325, 8.116902097686662, 0, 0, 0], [730, 1, \n 0.10077690996578814, 5.038845498289407, 0, 0, 0], [731, 1, \n 0.2848873481344926, 14.244367406724633, 0, 0, 0], [732, 1, \n 0.004647324338283344, 0.2323662169141672, 0, 0, 0], [733, 1, \n 0.12624170086049138, 6.312085043024569, 0, 0, 0], [735, 1, \n 0.013496339174192726, 0.6748169587096363, 0, 0, 0], [736, 1, \n 0.010185916357881302, 0.5092958178940651, 0, 0, 0], [737, 1, \n 0.00891267681314614, 0.445633840657307, 0, 0, 0], [738, 1, \n 0.04408591923645501, 2.2042959618227504, 0, 0, 0], [739, 1, \n 0.01906676218240906, 0.9533381091204531, 0, 0, 0], [741, 1, \n 0.0340591578216656, 1.7029578910832803, 0, 0, 0], [742, 1, \n 0.0028647889756541157, 0.14323944878270578, 0, 0, 0], [743, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [745, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 
0], [746, 1, \n 0.03183098861837907, 1.5915494309189535, 0, 0, 0], [747, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [748, 1, \n 0.03501408748021698, 1.7507043740108488, 0, 0, 0], [749, 1, \n 0.0025464790894703256, 0.12732395447351627, 0, 0, 0], [750, 1, \n 0.028902537665488188, 1.4451268832744095, 0, 0, 0], [758, 1, \n 0.0058887328944001276, 0.2944366447200064, 0, 0, 0], [760, 1, \n 0.2527380496299298, 12.636902481496492, 0, 0, 0], [761, 1, \n 0.004997465213085514, 0.2498732606542757, 0, 0, 0], [762, 1, \n 0.3517324242330887, 17.586621211654435, 0, 0, 0], [763, 1, \n 0.006461690689530951, 0.32308453447654756, 0, 0, 0], [765, 1, \n 0.018780283284843647, 0.9390141642421824, 0, 0, 0], [767, 1, \n 0.0035650707252584553, 0.17825353626292276, 0, 0, 0], [769, 1, \n 0.013782818071758136, 0.6891409035879068, 0, 0, 0], [771, 1, \n 0.21963382146681557, 10.981691073340778, 0, 0, 0], [772, 1, \n 0.002992112930127632, 0.1496056465063816, 0, 0, 0], [774, 1, \n 0.010663381187156987, 0.5331690593578494, 0, 0, 0], [775, 1, \n 0.04074366543152521, 2.0371832715762603, 0, 0, 0], [776, 1, \n 0.01782535362629228, 0.891267681314614, 0, 0, 0], [777, 1, \n 0.012573240504259732, 0.6286620252129866, 0, 0, 0], [778, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [779, 1, \n 0.010886198107485642, 0.5443099053742821, 0, 0, 0], [781, 1, \n 0.4169859509007658, 20.84929754503829, 0, 0, 0], [784, 1, \n 0.4058451048843331, 20.292255244216655, 0, 0, 0], [785, 1, \n 0.00047746482927568597, 0.0238732414637843, 0, 0, 0], [786, 1, \n 0.0621977517603127, 3.109887588015635, 0, 0, 0], [787, 1, \n 0.24764509145098912, 12.382254572549456, 0, 0, 0], [788, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [789, 1, \n 0.0123185925953127, 0.615929629765635, 0, 0, 0], [791, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [792, 1, \n 0.009979014931861837, 0.49895074659309185, 0, 0, 0], [793, 1, \n 0.0031194368846011486, 0.15597184423005744, 0, 0, 0], [794, 1, \n 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0], [795, 1, \n 0.004329014452099553, 0.2164507226049777, 0, 0, 0], [796, 1, \n 0.027088171314240586, 1.3544085657120293, 0, 0, 0], [798, 1, \n 0.10179550160157626, 5.089775080078813, 0, 0, 0], [800, 1, \n 0.0058091554228541795, 0.290457771142709, 0, 0, 0], [801, 1, \n 0.007957747154594767, 0.3978873577297384, 0, 0, 0], [802, 1, \n 0.07957747154594767, 3.9788735772973833, 0, 0, 0], [805, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [806, 1, \n 0.005697746962689853, 0.2848873481344927, 0, 0, 0], [808, 1, \n 0.034616200122487235, 1.7308100061243619, 0, 0, 0], [809, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [811, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [814, 1, \n 0.014164789935178685, 0.7082394967589343, 0, 0, 0], [816, 1, \n 0.012748310941660816, 0.6374155470830408, 0, 0, 0], [817, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [818, 1, \n 0.24096058384112953, 12.048029192056477, 0, 0, 0], [821, 1, \n 0.013130282805081364, 0.6565141402540683, 0, 0, 0], [822, 1, \n 0.04265352474862795, 2.1326762374313977, 0, 0, 0], [825, 1, \n 0.013591832140047864, 0.6795916070023932, 0, 0, 0], [826, 1, \n 0.018461973398659858, 0.9230986699329929, 0, 0, 0], [830, 1, \n 0.02832957987035737, 1.4164789935178685, 0, 0, 0], [833, 1, \n 0.0059205638830185075, 0.2960281941509254, 0, 0, 0], [834, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [835, 1, \n 0.010138169874953733, 0.5069084937476867, 0, 0, 0], [836, 1, \n 0.008116902097686661, 
0.4058451048843331, 0, 0, 0], [837, 1, \n 0.15024226627874918, 7.512113313937459, 0, 0, 0], [839, 1, \n 0.011666057328635928, 0.5833028664317964, 0, 0, 0], [840, 1, \n 0.4427690516816528, 22.138452584082643, 0, 0, 0], [841, 1, \n 0.0037083101740411615, 0.18541550870205808, 0, 0, 0], [843, 1, \n 0.10599719209920229, 5.2998596049601145, 0, 0, 0], [844, 1, \n 0.012732395447351627, 0.6366197723675814, 0, 0, 0], [845, 1, \n 0.10122254380644544, 5.061127190322272, 0, 0, 0], [848, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [849, 1, \n 0.24796340133717296, 12.398170066858649, 0, 0, 0], [850, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [851, 1, \n 0.01265281797580568, 0.632640898790284, 0, 0, 0], [852, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [853, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [855, 1, \n 0.21899720169444797, 10.949860084722399, 0, 0, 0], [856, 1, \n 0.011459155902616463, 0.5729577951308231, 0, 0, 0], [857, 1, \n 0.4462704604296745, 22.313523021483725, 0, 0, 0], [858, 1, \n 0.01808000153523931, 0.9040000767619655, 0, 0, 0], [859, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [860, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [862, 1, \n 0.23077466748324824, 11.538733374162412, 0, 0, 0], [863, 1, \n 0.0001909859317102744, 0.00954929658551372, 0, 0, 0], [864, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [865, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [866, 1, \n 0.08290062675770644, 4.145031337885323, 0, 0, 0], [867, 1, \n 0.24478030247533505, 12.239015123766753, 0, 0, 0], [869, 1, \n 0.4329014452099553, 21.645072260497766, 0, 0, 0], [870, 1, \n 0.018589297353133374, 0.9294648676566688, 0, 0, 0], [872, 1, \n 0.00716197243913529, 0.3580986219567645, 0, 0, 0], [873, 1, \n 0.038833806114422456, 1.941690305721123, 0, 0, 0], [874, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [875, 1, \n 0.007766761222884492, 0.38833806114422464, 0, 0, 0], [876, 1, \n 0.018589297353133374, 0.9294648676566688, 0, 0, 0], [877, 1, \n 0.007894085177358009, 0.39470425886790045, 0, 0, 0], [881, 1, \n 0.3187236890358296, 15.93618445179148, 0, 0, 0], [882, 1, \n 0.005538592019597957, 0.2769296009798979, 0, 0, 0], [883, 1, \n 0.005729577951308231, 0.28647889756541156, 0, 0, 0], [885, 1, \n 0.15597184423005742, 7.798592211502871, 0, 0, 0], [886, 1, \n 0.8186930272647096, 40.93465136323548, 0, 0, 0], [888, 1, \n 0.011172677005051054, 0.5586338502525526, 0, 0, 0], [889, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [890, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [895, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [896, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [897, 1, \n 0.01782535362629228, 0.891267681314614, 0, 0, 0], [898, 1, \n 0.013464508185574344, 0.6732254092787172, 0, 0, 0], [899, 1, \n 0.002705634032562221, 0.13528170162811107, 0, 0, 0], [900, 1, \n 0.03584169318429482, 1.7920846592147412, 0, 0, 0], [902, 1, \n 0.006207042780583919, 0.31035213902919595, 0, 0, 0], [903, 1, \n 0.0031990143561470966, 0.15995071780735484, 0, 0, 0], [905, 1, \n 0.021851973686517232, 1.0925986843258617, 0, 0, 0], [906, 1, \n 0.010504226244065093, 0.5252113122032547, 0, 0, 0], [907, 1, \n 0.02142225534016911, 1.0711127670084555, 0, 0, 0], [909, 1, \n 0.005856901905781748, 0.2928450952890874, 0, 0, 0], [913, 1, \n 0.02355493157760051, 1.1777465788800257, 0, 0, 0], [915, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [917, 1, \n 
0.005411268065124442, 0.27056340325622213, 0, 0, 0], [918, 1, \n 0.012254930618075942, 0.612746530903797, 0, 0, 0], [920, 1, \n 0.0020371832715762603, 0.10185916357881303, 0, 0, 0], [921, 1, \n 0.019735212943395024, 0.9867606471697512, 0, 0, 0], [922, 1, \n 0.05220282133414166, 2.6101410667070835, 0, 0, 0], [923, 1, \n 0.023236621691416718, 1.161831084570836, 0, 0, 0], [924, 1, \n 0.0037242256683503506, 0.18621128341751753, 0, 0, 0], [925, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [928, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [931, 1, \n 0.03455253814525047, 1.7276269072625237, 0, 0, 0], [934, 1, \n 0.09421972631040204, 4.710986315520103, 0, 0, 0], [935, 1, \n 0.007352958370845565, 0.36764791854227824, 0, 0, 0], [936, 1, \n 0.016615776058793875, 0.8307888029396938, 0, 0, 0], [937, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [939, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [940, 1, \n 0.009421972631040205, 0.47109863155201026, 0, 0, 0], [942, 1, \n 0.016520283092938737, 0.8260141546469368, 0, 0, 0], [944, 1, \n 0.004042535554534142, 0.2021267777267071, 0, 0, 0], [945, 1, \n 0.011140846016432674, 0.5570423008216338, 0, 0, 0], [948, 1, \n 0.025146481008519465, 1.2573240504259733, 0, 0, 0], [950, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [952, 1, \n 0.005045211696013082, 0.2522605848006541, 0, 0, 0], [956, 1, \n 0.020690142601946394, 1.0345071300973196, 0, 0, 0], [957, 1, \n 0.0019098593171027439, 0.0954929658551372, 0, 0, 0], [958, 1, \n 0.010615634704229418, 0.530781735211471, 0, 0, 0], [959, 1, \n 0.007241549910681238, 0.3620774955340619, 0, 0, 0], [960, 1, \n 0.004217605991935227, 0.21088029959676136, 0, 0, 0], [963, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [965, 1, \n 0.11204507993669433, 5.602253996834716, 0, 0, 0], [966, 1, \n 0.021008452488130186, 1.0504226244065094, 0, 0, 0], [967, 1, \n 0.01193662073189215, 0.5968310365946076, 0, 0, 0], [968, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [969, 1, \n 0.018111832523857688, 0.9055916261928845, 0, 0, 0], [971, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [972, 1, \n 0.12414085561167836, 6.207042780583918, 0, 0, 0], [973, 1, \n 0.4287634166895661, 21.438170834478306, 0, 0, 0], [975, 1, \n 0.01671126902464901, 0.8355634512324506, 0, 0, 0], [976, 1, \n 0.008562535938343968, 0.4281267969171984, 0, 0, 0], [977, 1, \n 0.1031324031235482, 5.15662015617741, 0, 0, 0], [978, 1, \n 0.0007321127382227185, 0.03660563691113593, 0, 0, 0], [981, 1, \n 0.03787887645587108, 1.8939438227935543, 0, 0, 0], [982, 1, \n 0.0015756339366097638, 0.07878169683048819, 0, 0, 0], [983, 1, \n 0.01400563499208679, 0.7002817496043395, 0, 0, 0], [984, 1, \n 0.14801409707546268, 7.400704853773133, 0, 0, 0], [985, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [986, 1, \n 0.0017825353626292277, 0.08912676813146138, 0, 0, 0], [987, 1, \n 0.02618098813861678, 1.3090494069308392, 0, 0, 0], [988, 1, \n 0.0008116902097686662, 0.04058451048843331, 0, 0, 0], [990, 1, \n 0.0954929658551372, 4.7746482927568605, 0, 0, 0], [993, 1, \n 0.06238873769202297, 3.119436884601149, 0, 0, 0], [994, 1, \n 0.010504226244065093, 0.5252113122032547, 0, 0, 0], [995, 1, \n 0.0006684507609859605, 0.033422538049298026, 0, 0, 0], [996, 1, \n 0.003660563691113593, 0.18302818455567965, 0, 0, 0], [997, 1, \n 0.005984225860255264, 0.2992112930127632, 0, 0, 0], [998, 1, \n 0.13464508185574348, 6.732254092787174, 0, 0, 0], [999, 1, \n 0.004965634224467135, 
0.24828171122335674, 0, 0, 0], [1000, 1, \n 0.015597184423005743, 0.7798592211502873, 0, 0, 0], [1002, 1, \n 0.0031512678732195276, 0.15756339366097638, 0, 0, 0], [1003, 1, \n 0.2864788975654116, 14.32394487827058, 0, 0, 0], [1007, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [1008, 1, \n 0.015597184423005743, 0.7798592211502873, 0, 0, 0], [1010, 1, \n 0.238732414637843, 11.93662073189215, 0, 0, 0], [1011, 1, \n 0.005952394871636886, 0.2976197435818443, 0, 0, 0], [1012, 1, \n 0.9024085273310466, 45.12042636655233, 0, 0, 0], [1018, 1, \n 0.05599070897972878, 2.7995354489864392, 0, 0, 0], [1019, 1, \n 0.03819718634205488, 1.909859317102744, 0, 0, 0], [1023, 1, \n 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0], [1025, 1, \n 0.03616000307047862, 1.808000153523931, 0, 0, 0], [1026, 1, \n 0.20868396138209316, 10.434198069104658, 0, 0, 0], [1027, 3, \n 0.003074873500535418, 0.15374367502677092, 2.22, 61.69, 0.004502], [\n 1028, 2, 0.025464790894703257, 1.273239544735163, 0, 0, 0], [1029, 2, \n 0.003819718634205488, 0.19098593171027442, 0, 0, 0], [1030, 2, \n 0.06480789282701978, 3.2403946413509894, 0, 0, 0], [1031, 2, \n 0.0921316134570364, 4.60658067285182, 0, 0, 0], [1032, 2, \n 0.009772775025341927, 0.4886387512670964, 0, 0, 0], [1033, 2, \n 0.0026465383981801338, 0.1323269199090067, 0, 0, 0], [1034, 2, \n 0.005364335122251813, 0.26821675611259066, 0, 0, 0], [1035, 3, \n 0.00317587127473044, 0.158793563736522, 2.22, 61.69, 0.004502], [1036, \n 2, 0.003979401770097368, 0.19897008850486841, 0, 0, 0], [1037, 2, \n 0.0060277734620055035, 0.3013886731002752, 0, 0, 0], [1038, 2, \n 0.005462103769994554, 0.2731051884997277, 0, 0, 0], [1039, 2, \n 0.005913400500746229, 0.2956700250373115, 0, 0, 0], [1041, 2, \n 0.008736705901893021, 0.4368352950946511, 0, 0, 0], [1042, 2, \n 0.002236240074990482, 0.1118120037495241, 0, 0, 0], [1044, 3, \n 0.0023022419250361527, 0.11511209625180763, 2.22, 61.69, 0.004502], [\n 1046, 2, 0.00679827557108513, 0.33991377855425653, 0, 0, 0], [1047, 3, \n 0.0008294889076348922, 0.04147444538174461, 2.22, 61.69, 0.004502], [\n 1048, 2, 0.004561818873896339, 0.22809094369481697, 0, 0, 0], [1049, 2,\n 0.01870104799381521, 0.9350523996907605, 0, 0, 0], [1050, 2, \n 0.001674586221361763, 0.08372931106808816, 0, 0, 0], [1051, 2, \n 0.008610220286935111, 0.4305110143467556, 0, 0, 0], [1052, 3, \n 0.001315809692296204, 0.06579048461481019, 2.22, 61.69, 0.004502], [\n 1053, 3, 0.001042024786453249, 0.05210123932266245, 2.22, 61.69, \n 0.004502], [1054, 2, 0.017434200209443074, 0.8717100104721537, 0, 0, 0],\n [1055, 3, 0.00011871244168422753, 0.005935622084211377, 2.22, 61.69, \n 0.004502], [1056, 2, 0.022965347424951363, 1.1482673712475682, 0, 0, 0],\n [1057, 2, 0.02718238967557453, 1.3591194837787268, 0, 0, 0], [1058, 2, \n 0.04461485898591968, 2.2307429492959843, 0, 0, 0], [1059, 2, \n 0.017013874249418158, 0.8506937124709079, 0, 0, 0], [1060, 3, \n 0.0003260901937161927, 0.016304509685809633, 2.22, 61.69, 0.004502], [\n 1061, 2, 0.005436368167959151, 0.27181840839795757, 0, 0, 0], [1062, 3,\n 0.00011488919588970951, 0.005744459794485476, 2.22, 61.69, 0.004502], [\n 1063, 3, 0.0003425274483539234, 0.01712637241769617, 2.22, 61.69, \n 0.004502], [1064, 2, 0.008174693602404245, 0.4087346801202122, 0, 0, 0],\n [1065, 2, 0.014487023099809197, 0.7243511549904599, 0, 0, 0], [1066, 2,\n 0.005099925117482008, 0.2549962558741004, 0, 0, 0], [1067, 3, \n 0.0008283924405670749, 0.04141962202835374, 2.22, 61.69, 0.004502], [\n 1072, 2, 0.007168748144119091, 
0.3584374072059546, 0, 0, 0], [1073, 2, \n 0.004954025493475761, 0.24770127467378808, 0, 0, 0], [1074, 2, \n 0.009778033156939965, 0.48890165784699824, 0, 0, 0], [1077, 3, \n 0.0007664457877913659, 0.0383222893895683, 2.22, 61.69, 0.004502], [\n 1079, 2, 0.004604543003215469, 0.23022715016077344, 0, 0, 0], [1080, 2,\n 0.003349647097644276, 0.1674823548822138, 0, 0, 0], [1081, 2, \n 0.01601172754276052, 0.800586377138026, 0, 0, 0], [1082, 2, \n 0.016964047639621335, 0.8482023819810669, 0, 0, 0], [1083, 2, \n 0.02162958181089171, 1.0814790905445855, 0, 0, 0], [1084, 2, \n 0.019857016058101837, 0.992850802905092, 0, 0, 0], [1085, 2, \n 0.005758465971105609, 0.2879232985552805, 0, 0, 0], [1086, 2, \n 0.011188498437811297, 0.5594249218905649, 0, 0, 0], [1087, 2, \n 0.00397539235779677, 0.19876961788983852, 0, 0, 0], [1088, 3, \n 0.0013881136481632718, 0.06940568240816358, 2.22, 61.69, 0.004502], [\n 1089, 2, 0.01263503907246808, 0.631751953623404, 0, 0, 0], [1090, 2, \n 0.005674885746854652, 0.2837442873427326, 0, 0, 0], [1091, 3, \n 0.002915330196419503, 0.14576650982097517, 2.22, 61.69, 0.004502], [\n 1092, 2, 0.003437876146252996, 0.1718938073126498, 0, 0, 0], [1093, 2, \n 0.009906140914748767, 0.49530704573743833, 0, 0, 0], [1094, 3, \n 0.00023930778294026586, 0.011965389147013294, 2.22, 61.69, 0.004502], [\n 1095, 3, 1.3047613994501091e-05, 0.0006523806997250545, 2.22, 61.69, \n 0.004502], [1096, 2, 0.005379826679377905, 0.2689913339688953, 0, 0, 0],\n [1097, 3, 0.0002929164939619051, 0.014645824698095257, 2.22, 61.69, \n 0.004502], [1098, 2, 0.004521623727146264, 0.22608118635731317, 0, 0, 0\n ], [1099, 2, 0.018521637260932335, 0.9260818630466169, 0, 0, 0], [1101,\n 2, 0.005343192104787693, 0.2671596052393847, 0, 0, 0], [1102, 2, \n 0.02234407998394998, 1.1172039991974991, 0, 0, 0], [1103, 2, \n 0.01562148424141561, 0.7810742120707805, 0, 0, 0], [1104, 3, \n 1.3172819714966009e-05, 0.0006586409857483004, 2.22, 61.69, 0.004502],\n [1105, 3, 0.0001386935566767763, 0.006934677833838815, 2.22, 61.69, \n 0.004502], [1106, 3, 0.00014577275883068604, 0.0072886379415343025, \n 2.22, 61.69, 0.004502], [1107, 2, 0.003391514823097816, \n 0.16957574115489077, 0, 0, 0], [1108, 2, 0.015741564572850766, \n 0.7870782286425384, 0, 0, 0], [1109, 3, 4.9542410867097304e-05, \n 0.002477120543354865, 2.22, 61.69, 0.004502], [1110, 3, \n 0.00010533237807450261, 0.00526661890372513, 2.22, 61.69, 0.004502], [\n 1111, 2, 0.003682113867455725, 0.18410569337278626, 0, 0, 0], [1112, 2,\n 0.004426690383932842, 0.2213345191966421, 0, 0, 0], [1113, 3, \n 0.00022513170529279912, 0.011256585264639957, 2.22, 61.69, 0.004502], [\n 1114, 3, 0.0005550707533170727, 0.027753537665853634, 2.22, 61.69, \n 0.004502], [1115, 2, 0.0032197222090973076, 0.16098611045486538, 0, 0, \n 0], [1116, 3, 0.002075453185310181, 0.10377265926550905, 2.22, 61.69, \n 0.004502], [1117, 2, 0.005780032679669937, 0.2890016339834969, 0, 0, 0],\n [1118, 3, 0.0005199122415272909, 0.025995612076364544, 2.22, 61.69, \n 0.004502], [1119, 3, 0.0027536366373517632, 0.13768183186758817, 2.22, \n 61.69, 0.004502], [1120, 3, 0.0001538074296570127, 0.007690371482850636,\n 2.22, 61.69, 0.004502], [1121, 3, 3.4414977793908876e-05, \n 0.0017207488896954439, 2.22, 61.69, 0.004502], [1122, 3, \n 9.313004041299959e-05, 0.00465650202064998, 2.22, 61.69, 0.004502], [\n 1123, 3, 8.936930538294867e-05, 0.004468465269147434, 2.22, 61.69, \n 0.004502], [1124, 3, 8.201464578534214e-05, 0.004100732289267108, 2.22,\n 61.69, 0.004502], [1125, 3, 0.001588801827253252, 
0.07944009136266261, \n 2.22, 61.69, 0.004502], [1126, 3, 0.0018426380603240493, \n 0.09213190301620247, 2.22, 61.69, 0.004502], [1127, 2, \n 0.006703391093283916, 0.3351695546641958, 0, 0, 0], [1128, 3, \n 0.0001948941120002845, 0.009744705600014225, 2.22, 61.69, 0.004502], [\n 1129, 3, 0.0003016780123772693, 0.015083900618863466, 2.22, 61.69, \n 0.004502], [1130, 3, 6.530151955301432e-05, 0.003265075977650716, 2.22,\n 61.69, 0.004502], [1131, 3, 0.00018443373362804407, \n 0.009221686681402204, 2.22, 61.69, 0.004502], [1132, 3, \n 2.2886271300209156e-05, 0.0011443135650104578, 2.22, 61.69, 0.004502],\n [1133, 3, 4.5810964480308454e-05, 0.002290548224015423, 2.22, 61.69, \n 0.004502], [1134, 3, 3.236913111220881e-05, 0.0016184565556104404, 2.22,\n 61.69, 0.004502], [1135, 3, 0.0004528723014143959, 0.022643615070719797,\n 2.22, 61.69, 0.004502], [1136, 3, 2.5636662405410735e-05, \n 0.0012818331202705368, 2.22, 61.69, 0.004502], [1137, 3, \n 0.00016790582753671083, 0.008395291376835541, 2.22, 61.69, 0.004502], [\n 1138, 3, 7.98498118498449e-05, 0.003992490592492246, 2.22, 61.69, \n 0.004502], [1139, 3, 0.0012619566606414858, 0.0630978330320743, 2.22, \n 61.69, 0.004502], [1140, 3, 0.0013901725262468376, 0.06950862631234189,\n 2.22, 61.69, 0.004502], [1141, 2, 0.0076053500901520025, \n 0.38026750450760016, 0, 0, 0], [1142, 3, 7.73959943559724e-05, \n 0.00386979971779862, 2.22, 61.69, 0.004502], [1143, 3, \n 0.001326344775515579, 0.06631723877577896, 2.22, 61.69, 0.004502], [\n 1144, 2, 0.00334399697192306, 0.16719984859615303, 0, 0, 0], [1145, 2, \n 0.011197481443497569, 0.5598740721748785, 0, 0, 0], [1146, 3, \n 5.4833151376821656e-05, 0.002741657568841083, 2.22, 61.69, 0.004502], [\n 1147, 3, 0.002909588342312674, 0.14547941711563372, 2.22, 61.69, \n 0.004502], [1148, 3, 0.0011233492673683868, 0.05616746336841934, 2.22, \n 61.69, 0.004502], [1149, 3, 0.0005447417794635118, 0.02723708897317559,\n 2.22, 61.69, 0.004502], [1150, 3, 0.0002306193019977063, \n 0.011530965099885314, 2.22, 61.69, 0.004502], [1151, 3, \n 0.0008299047575760064, 0.04149523787880033, 2.22, 61.69, 0.004502], [\n 1152, 3, 7.417749437366368e-06, 0.0003708874718683184, 2.22, 61.69, \n 0.004502], [1153, 3, 4.283385139953296e-06, 0.0002141692569976648, 2.22,\n 61.69, 0.004502], [1154, 3, 1.0001936259040478e-05, \n 0.0005000968129520238, 2.22, 61.69, 0.004502], [1155, 3, \n 3.879887736397654e-05, 0.001939943868198827, 2.22, 61.69, 0.004502], [\n 1156, 3, 0.0010200134924871187, 0.05100067462435595, 2.22, 61.69, \n 0.004502], [1157, 3, 0.00027719360593007886, 0.013859680296503944, 2.22,\n 61.69, 0.004502], [1158, 3, 6.640198284893194e-05, 0.003320099142446597,\n 2.22, 61.69, 0.004502], [1159, 3, 0.0008593149079194712, \n 0.04296574539597356, 2.22, 61.69, 0.004502], [1160, 2, \n 0.015175599618213626, 0.7587799809106813, 0, 0, 0], [1161, 3, \n 0.0008335971783564253, 0.04167985891782127, 2.22, 61.69, 0.004502], [\n 1162, 2, 0.02334015009089389, 1.1670075045446946, 0, 0, 0], [1163, 2, \n 0.014481760844263846, 0.7240880422131923, 0, 0, 0], [1164, 2, \n 0.01586368621264448, 0.793184310632224, 0, 0, 0], [1165, 2, \n 0.0025257844262807964, 0.12628922131403983, 0, 0, 0], [1166, 2, \n 0.005301588846150501, 0.26507944230752506, 0, 0, 0], [1167, 3, \n 0.00032173361521807824, 0.016086680760903912, 2.22, 61.69, 0.004502], [\n 1168, 3, 8.56746647323757e-05, 0.004283733236618785, 2.22, 61.69, \n 0.004502], [1169, 3, 0.00017327803824915608, 0.008663901912457804, 2.22,\n 61.69, 0.004502], [1170, 3, 1.6933420442211857e-05, \n 
0.000846671022110593, 2.22, 61.69, 0.004502], [1173, 2, \n 0.01618626952698487, 0.8093134763492436, 0, 0, 0], [1174, 3, \n 8.021928882473966e-05, 0.004010964441236983, 2.22, 61.69, 0.004502], [\n 1175, 3, 5.445989361520192e-05, 0.002722994680760096, 2.22, 61.69, \n 0.004502], [1176, 3, 1.4783581244732665e-05, 0.0007391790622366333, \n 2.22, 61.69, 0.004502], [1177, 3, 0.0017745146198091144, \n 0.08872573099045572, 2.22, 61.69, 0.004502], [1178, 3, \n 0.0001923728167601116, 0.00961864083800558, 2.22, 61.69, 0.004502], [\n 1179, 3, 8.316119408334767e-05, 0.004158059704167384, 2.22, 61.69, \n 0.004502], [1180, 3, 4.3834108298364086e-05, 0.002191705414918204, 2.22,\n 61.69, 0.004502], [1181, 2, 0.00545834972439398, 0.272917486219699, 0, \n 0, 0], [1182, 2, 0.006322880792722177, 0.3161440396361089, 0, 0, 0], [\n 1183, 3, 0.00177138301503702, 0.08856915075185101, 2.22, 61.69, \n 0.004502], [1184, 3, 0.0002382585530365376, 0.01191292765182688, 2.22, \n 61.69, 0.004502], [1185, 3, 0.0007221796423758263, 0.036108982118791315,\n 2.22, 61.69, 0.004502], [1186, 3, 0.0024774929167619207, \n 0.12387464583809603, 2.22, 61.69, 0.004502], [1187, 3, \n 0.0006248151564821885, 0.031240757824109424, 2.22, 61.69, 0.004502], [\n 1188, 2, 0.011440868435801076, 0.5720434217900537, 0, 0, 0], [1189, 3, \n 0.001075762722956362, 0.0537881361478181, 2.22, 61.69, 0.004502], [1190,\n 2, 0.005589994050160443, 0.2794997025080222, 0, 0, 0], [1191, 2, \n 0.0018543296854580205, 0.09271648427290104, 0, 0, 0], [1196, 2, \n 0.010230349597894291, 0.5115174798947145, 0, 0, 0], [1197, 2, \n 0.005767282789943071, 0.2883641394971536, 0, 0, 0], [1198, 3, \n 0.002534966273924786, 0.12674831369623932, 2.22, 61.69, 0.004502], [\n 1199, 2, 0.012822920004466005, 0.6411460002233003, 0, 0, 0], [1200, 2, \n 0.0035658606694853635, 0.1782930334742682, 0, 0, 0], [1203, 2, \n 0.004628517197038981, 0.23142585985194902, 0, 0, 0], [1204, 3, \n 0.0023050069174568553, 0.11525034587284279, 2.22, 61.69, 0.004502], [\n 1211, 3, 0.00045660111641118554, 0.022830055820559275, 2.22, 61.69, \n 0.004502], [1212, 2, 0.002310697165483375, 0.11553485827416875, 0, 0, 0\n ], [1213, 2, 0.001571453208551938, 0.0785726604275969, 0, 0, 0], [1214,\n 3, 0.00011420293137312512, 0.005710146568656256, 2.22, 61.69, 0.004502],\n [1215, 3, 6.379928530672539e-05, 0.0031899642653362694, 2.22, 61.69, \n 0.004502], [1216, 2, 0.001869892681863531, 0.09349463409317656, 0, 0, 0\n ], [1217, 3, 0.0009267444929459551, 0.04633722464729775, 2.22, 61.69, \n 0.004502], [1218, 3, 2.5227972538599323e-05, 0.0012613986269299662, \n 2.22, 61.69, 0.004502], [1219, 3, 0.0007855588922898729, \n 0.03927794461449365, 2.22, 61.69, 0.004502], [1220, 3, \n 0.0013820054686401347, 0.06910027343200674, 2.22, 61.69, 0.004502], [\n 1221, 2, 0.015352878695497882, 0.7676439347748941, 0, 0, 0], [1222, 2, \n 0.006253768855699434, 0.3126884427849717, 0, 0, 0], [1225, 3, \n 0.0010446814701628646, 0.05223407350814323, 2.22, 61.69, 0.004502], [\n 1226, 3, 0.00014078918131144803, 0.007039459065572402, 2.22, 61.69, \n 0.004502], [1228, 3, 7.674774797726922e-05, 0.003837387398863461, 2.22,\n 61.69, 0.004502], [1229, 2, 0.00326230849376, 0.16311542468800003, 0, 0,\n 0], [1230, 3, 4.264866012739944e-05, 0.002132433006369972, 2.22, 61.69,\n 0.004502], [1231, 3, 0.0011074075337247616, 0.05537037668623808, 2.22, \n 61.69, 0.004502], [1232, 2, 0.0025289299564359583, 0.12644649782179793,\n 0, 0, 0], [1233, 2, 0.03662908231521014, 1.831454115760507, 0, 0, 0], [\n 1235, 3, 0.0005753349157073776, 0.028766745785368877, 2.22, 
61.69, \n 0.004502], [1236, 2, 0.005234608320670995, 0.26173041603354974, 0, 0, 0\n ], [1237, 3, 0.0005991995096878405, 0.02995997548439202, 2.22, 61.69, \n 0.004502], [1238, 2, 0.005358277784741974, 0.26791388923709875, 0, 0, 0\n ], [1239, 3, 0.0001443666373276477, 0.007218331866382386, 2.22, 61.69, \n 0.004502], [1240, 2, 0.01429475266593171, 0.7147376332965855, 0, 0, 0],\n [1241, 2, 0.012239428692174842, 0.6119714346087421, 0, 0, 0], [1242, 3,\n 0.0009261376778324836, 0.04630688389162418, 2.22, 61.69, 0.004502], [\n 1243, 2, 0.0030479476517051274, 0.15239738258525637, 0, 0, 0], [1244, 2,\n 0.020592901244747865, 1.0296450622373932, 0, 0, 0], [1245, 3, \n 0.0003407395317026344, 0.017036976585131723, 2.22, 61.69, 0.004502], [\n 1246, 2, 0.003636870278584459, 0.18184351392922293, 0, 0, 0], [1247, 3,\n 0.0005536878435284997, 0.027684392176424988, 2.22, 61.69, 0.004502], [\n 1248, 2, 0.005854245631350222, 0.2927122815675111, 0, 0, 0], [1249, 2, \n 0.0029138707379534994, 0.14569353689767497, 0, 0, 0], [1250, 3, \n 0.0011051662697331927, 0.055258313486659626, 2.22, 61.69, 0.004502], [\n 1251, 3, 0.0006892543892280731, 0.034462719461403654, 2.22, 61.69, \n 0.004502], [1252, 3, 0.0004933226435696849, 0.02466613217848425, 2.22, \n 61.69, 0.004502], [1253, 2, 0.0033963585596517073, 0.16981792798258535,\n 0, 0, 0], [1254, 2, 0.005238024431161238, 0.2619012215580619, 0, 0, 0],\n [1255, 3, 0.0002152231180033463, 0.010761155900167315, 2.22, 61.69, \n 0.004502], [1256, 3, 0.0008829260686159954, 0.04414630343079977, 2.22, \n 61.69, 0.004502], [1257, 2, 0.005416876706065561, 0.2708438353032781, 0,\n 0, 0], [1258, 2, 0.014991588973313675, 0.7495794486656838, 0, 0, 0], [\n 1259, 2, 0.006546118673323141, 0.32730593366615707, 0, 0, 0], [1260, 3,\n 0.0008510469735902338, 0.042552348679511694, 2.22, 61.69, 0.004502], [\n 1261, 2, 0.005305411264712167, 0.2652705632356084, 0, 0, 0], [1267, 3, \n 0.0012287427693400252, 0.06143713846700125, 2.22, 61.69, 0.004502], [\n 1274, 2, 0.001691611211165477, 0.08458056055827386, 0, 0, 0], [1275, 2,\n 0.0038666125605823546, 0.19333062802911774, 0, 0, 0], [1276, 3, \n 0.0009102374698035218, 0.04551187349017608, 2.22, 61.69, 0.004502], [\n 1277, 2, 0.0023965297543892265, 0.11982648771946133, 0, 0, 0], [1278, 2,\n 0.006398316507252847, 0.31991582536264235, 0, 0, 0], [1282, 3, \n 0.0001105941762774276, 0.00552970881387138, 2.22, 61.69, 0.004502], [\n 1283, 2, 0.08261824948992594, 4.130912474496298, 0, 0, 0], [1287, 2, \n 0.003083233730049012, 0.1541616865024506, 0, 0, 0], [1288, 2, \n 0.004640611077226182, 0.23203055386130914, 0, 0, 0], [1289, 2, \n 0.004963561654090838, 0.24817808270454192, 0, 0, 0], [1290, 3, \n 0.0001244867117459489, 0.006224335587297446, 2.22, 61.69, 0.004502], [\n 1291, 2, 0.003736373434735334, 0.1868186717367667, 0, 0, 0], [1292, 3, \n 0.0011143622294130919, 0.05571811147065459, 2.22, 61.69, 0.004502], [\n 1293, 3, 8.952966571388897e-05, 0.004476483285694448, 2.22, 61.69, \n 0.004502], [1294, 3, 0.00020936993212911583, 0.010468496606455793, 2.22,\n 61.69, 0.004502], [1295, 3, 0.0002089734159756435, 0.010448670798782174,\n 2.22, 61.69, 0.004502], [1300, 3, 0.0007801536738897055, \n 0.03900768369448528, 2.22, 61.69, 0.004502], [1301, 2, \n 0.0019439262202234247, 0.09719631101117124, 0, 0, 0], [1302, 3, \n 0.00012789433882958004, 0.006394716941479003, 2.22, 61.69, 0.004502], [\n 1303, 3, 0.00010996863751682274, 0.005498431875841137, 2.22, 61.69, \n 0.004502], [1306, 3, 0.00011631130798083146, 0.005815565399041573, 2.22,\n 61.69, 0.004502], [1307, 3, 
1.9031130574577255e-05, \n 0.0009515565287288628, 2.22, 61.69, 0.004502], [1308, 3, \n 0.0001224932857995621, 0.006124664289978106, 2.22, 61.69, 0.004502], [\n 1312, 2, 0.016696303623916272, 0.8348151811958137, 0, 0, 0], [1317, 3, \n 0.0015252502049763412, 0.07626251024881707, 2.22, 61.69, 0.004502], [\n 1319, 3, 0.001127343871228203, 0.05636719356141015, 2.22, 61.69, \n 0.004502], [1323, 2, 0.012675857799799822, 0.6337928899899912, 0, 0, 0],\n [1326, 2, 0.003288096915491701, 0.16440484577458506, 0, 0, 0], [1327, 2,\n 0.0032338308031027566, 0.16169154015513784, 0, 0, 0], [1328, 3, \n 0.0010226241895011407, 0.05113120947505704, 2.22, 61.69, 0.004502], [\n 1331, 3, 1.841349064624893e-05, 0.0009206745323124464, 2.22, 61.69, \n 0.004502], [1336, 3, 0.0008820603397680993, 0.04410301698840497, 2.22, \n 61.69, 0.004502], [1337, 2, 0.007722987880773172, 0.3861493940386586, 0,\n 0, 0], [1339, 3, 0.0006387594087649589, 0.03193797043824795, 2.22, \n 61.69, 0.004502], [1340, 2, 0.004462598113304154, 0.22312990566520774, \n 0, 0, 0], [1346, 2, 0.010970124373846759, 0.548506218692338, 0, 0, 0],\n [1348, 3, 0.0014456315404578254, 0.07228157702289127, 2.22, 61.69, \n 0.004502], [1349, 3, 0.0026962338610516797, 0.13481169305258398, 2.22, \n 61.69, 0.004502], [1356, 2, 0.0034369953484322496, 0.17184976742161248,\n 0, 0, 0], [1357, 2, 0.002662266539247354, 0.13311332696236772, 0, 0, 0],\n [1359, 2, 0.0023306710292170787, 0.11653355146085395, 0, 0, 0], [1360, \n 3, 0.0010909105792324338, 0.054545528961621695, 2.22, 61.69, 0.004502],\n [1361, 2, 0.0040238936307783425, 0.20119468153891715, 0, 0, 0], [1362, \n 2, 0.005036121783141224, 0.2518060891570612, 0, 0, 0], [1363, 3, \n 1.053265313635017e-06, 5.266326568175085e-05, 2.22, 61.69, 0.004502], [\n 1364, 3, 1.7153235992295212e-06, 8.576617996147605e-05, 2.22, 61.69, \n 0.004502], [1365, 3, 1.4382678391388228e-08, 7.191339195694115e-07, \n 2.22, 61.69, 0.004502], [1366, 3, 4.567454523924795e-05, \n 0.0022837272619623972, 2.22, 61.69, 0.004502], [1372, 2, \n 0.005918410111015705, 0.29592050555078525, 0, 0, 0], [1373, 3, \n 0.0010699135939801641, 0.05349567969900822, 2.22, 61.69, 0.004502], [\n 1374, 2, 0.006889508467327262, 0.3444754233663631, 0, 0, 0], [1375, 2, \n 0.003897629175102736, 0.1948814587551368, 0, 0, 0], [1376, 2, \n 0.011218109707548912, 0.5609054853774457, 0, 0, 0], [1377, 2, \n 0.01492085689824784, 0.7460428449123921, 0, 0, 0], [1378, 2, \n 0.014711861690612471, 0.7355930845306237, 0, 0, 0], [1379, 3, \n 4.570772978988336e-05, 0.0022853864894941682, 2.22, 61.69, 0.004502], [\n 1380, 3, 7.724465320438908e-05, 0.003862232660219454, 2.22, 61.69, \n 0.004502], [1381, 3, 5.9312910906981426e-05, 0.0029656455453490713, \n 2.22, 61.69, 0.004502], [1382, 2, 0.005563903887757258, \n 0.27819519438786294, 0, 0, 0], [1383, 2, 0.00682767638336331, \n 0.3413838191681655, 0, 0, 0], [1384, 3, 0.0002972463393517766, \n 0.014862316967588829, 2.22, 61.69, 0.004502], [1385, 3, \n 7.763953914385516e-06, 0.0003881976957192759, 2.22, 61.69, 0.004502], [\n 1386, 3, 4.2899112828393286e-05, 0.002144955641419664, 2.22, 61.69, \n 0.004502], [1387, 3, 0.00022240699424911273, 0.011120349712455638, 2.22,\n 61.69, 0.004502], [1388, 3, 5.909025672850305e-05, \n 0.0029545128364251525, 2.22, 61.69, 0.004502], [1389, 3, \n 1.3594135764164036e-05, 0.0006797067882082019, 2.22, 61.69, 0.004502],\n [1390, 3, 0.00023763846235409512, 0.011881923117704758, 2.22, 61.69, \n 0.004502], [1391, 3, 3.321367742134543e-05, 0.0016606838710672715, 2.22,\n 61.69, 0.004502], [1392, 3, 
0.0012290826914265437, 0.06145413457132718,\n 2.22, 61.69, 0.004502], [1393, 3, 8.111443639320659e-05, \n 0.00405572181966033, 2.22, 61.69, 0.004502], [1394, 3, \n 6.656099436847732e-05, 0.0033280497184238656, 2.22, 61.69, 0.004502], [\n 1395, 3, 4.381412847320234e-06, 0.00021907064236601173, 2.22, 61.69, \n 0.004502], [1396, 3, 1.3808034609766036e-06, 6.904017304883018e-05, \n 2.22, 61.69, 0.004502], [1397, 3, 0.0015969317375463513, \n 0.07984658687731756, 2.22, 61.69, 0.004502], [1398, 3, \n 0.00017695743260373348, 0.008847871630186674, 2.22, 61.69, 0.004502], [\n 1399, 3, 0.0011375222056992432, 0.05687611028496216, 2.22, 61.69, \n 0.004502], [1400, 3, 7.618867997042728e-05, 0.0038094339985213638, 2.22,\n 61.69, 0.004502], [1401, 2, 0.005687529053514607, 0.28437645267573036, \n 0, 0, 0], [1402, 3, 0.001676149990745289, 0.08380749953726446, 2.22, \n 61.69, 0.004502], [1403, 2, 0.007617262031172502, 0.38086310155862513, \n 0, 0, 0], [1404, 2, 0.008581667499251882, 0.42908337496259413, 0, 0, 0],\n [1405, 3, 0.0018812625008740895, 0.09406312504370447, 2.22, 61.69, \n 0.004502], [1406, 3, 0.0006852566793279422, 0.03426283396639711, 2.22, \n 61.69, 0.004502], [1407, 3, 9.408131582260726e-06, \n 0.00047040657911303626, 2.22, 61.69, 0.004502], [1408, 3, \n 0.001981558589185328, 0.09907792945926643, 2.22, 61.69, 0.004502], [\n 1409, 3, 0.0005556437532243559, 0.027782187661217796, 2.22, 61.69, \n 0.004502], [1410, 3, 0.0018249000205853422, 0.09124500102926711, 2.22, \n 61.69, 0.004502], [1411, 3, 0.002128337887273, 0.10641689436365001, \n 2.22, 61.69, 0.004502], [1412, 3, 0.0001556187955145351, \n 0.007780939775726756, 2.22, 61.69, 0.004502], [1413, 3, \n 0.00014666682461596226, 0.007333341230798113, 2.22, 61.69, 0.004502], [\n 1414, 3, 0.000658771107384773, 0.032938555369238655, 2.22, 61.69, \n 0.004502], [1418, 2, 0.004554955356465112, 0.2277477678232556, 0, 0, 0],\n [1419, 3, 0.0015414725788113375, 0.07707362894056687, 2.22, 61.69, \n 0.004502], [1421, 3, 0.00017979168856692174, 0.008989584428346088, 2.22,\n 61.69, 0.004502], [1422, 3, 0.00012256633129127437, \n 0.006128316564563719, 2.22, 61.69, 0.004502], [1423, 3, \n 4.9296505077127586e-05, 0.0024648252538563794, 2.22, 61.69, 0.004502],\n [1424, 2, 0.01394783725195249, 0.6973918625976245, 0, 0, 0], [1425, 3, \n 0.0013602274146640447, 0.06801137073320224, 2.22, 61.69, 0.004502], [\n 1426, 2, 0.0041334084484743, 0.20667042242371503, 0, 0, 0], [1427, 2, \n 0.019959940478923573, 0.9979970239461788, 0, 0, 0], [1428, 2, \n 0.013355559786648664, 0.6677779893324334, 0, 0, 0], [1431, 2, \n 0.014493414492796078, 0.724670724639804, 0, 0, 0], [1432, 3, \n 0.0007676953741931287, 0.03838476870965644, 2.22, 61.69, 0.004502], [\n 1433, 2, 0.08207564315805406, 4.103782157902703, 0, 0, 0], [1434, 2, \n 0.006330547929406013, 0.3165273964703006, 0, 0, 0], [1435, 2, \n 0.005520334862536408, 0.2760167431268204, 0, 0, 0], [1436, 2, \n 0.006266510483771511, 0.31332552418857557, 0, 0, 0], [1437, 2, \n 0.006731984814882108, 0.3365992407441054, 0, 0, 0], [1438, 2, \n 0.0161133113991622, 0.8056655699581102, 0, 0, 0], [1439, 2, \n 0.0063091033600462575, 0.3154551680023129, 0, 0, 0], [1440, 3, \n 3.334110448446746e-05, 0.0016670552242233731, 2.22, 61.69, 0.004502], [\n 1443, 2, 0.006557506818224797, 0.3278753409112398, 0, 0, 0], [1446, 2, \n 0.024519578499182584, 1.2259789249591293, 0, 0, 0], [1447, 2, \n 0.0023268276390894026, 0.11634138195447014, 0, 0, 0], [1448, 3, \n 0.00047896583949883246, 0.023948291974941624, 2.22, 61.69, 0.004502], [\n 1449, 2, 
0.006075750962706547, 0.3037875481353274, 0, 0, 0], [1450, 2, \n 0.0037724056227270084, 0.18862028113635043, 0, 0, 0], [1451, 2, \n 0.0043416728967246255, 0.21708364483623127, 0, 0, 0], [1452, 3, \n 0.0015322750739690742, 0.0766137536984537, 2.22, 61.69, 0.004502], [\n 1453, 2, 0.0016458121717638546, 0.08229060858819273, 0, 0, 0], [1454, 2,\n 0.004682929067992207, 0.2341464533996104, 0, 0, 0], [1455, 3, \n 4.166284213856912e-05, 0.0020831421069284557, 2.22, 61.69, 0.004502], [\n 1456, 2, 0.0031865889687578697, 0.15932944843789354, 0, 0, 0], [1457, 3,\n 0.00012749408723576006, 0.006374704361788003, 2.22, 61.69, 0.004502], [\n 1458, 3, 1.5673534819523866e-05, 0.0007836767409761935, 2.22, 61.69, \n 0.004502], [1459, 3, 0.00031178936740549625, 0.015589468370274815, 2.22,\n 61.69, 0.004502], [1460, 2, 0.003376889830190501, 0.16884449150952507, \n 0, 0, 0], [1461, 3, 0.001142843079861875, 0.05714215399309376, 2.22, \n 61.69, 0.004502], [1462, 3, 0.00015295973435731913, \n 0.007647986717865956, 2.22, 61.69, 0.004502], [1463, 3, \n 4.5276834778775515e-05, 0.002263841738938776, 2.22, 61.69, 0.004502], [\n 1464, 2, 0.006606826758650888, 0.33034133793254444, 0, 0, 0], [1465, 3,\n 0.0003374045759652472, 0.01687022879826236, 2.22, 61.69, 0.004502], [\n 1466, 3, 0.0003619193984034768, 0.01809596992017384, 2.22, 61.69, \n 0.004502], [1467, 3, 0.00013344536897072216, 0.006672268448536108, 2.22,\n 61.69, 0.004502], [1468, 3, 0.0015144656821575462, 0.0757232841078773, \n 2.22, 61.69, 0.004502], [1469, 2, 0.0033435340956597163, \n 0.16717670478298582, 0, 0, 0], [1470, 2, 0.005027084884666319, \n 0.2513542442333159, 0, 0, 0], [1471, 2, 0.010132763321185349, \n 0.5066381660592674, 0, 0, 0], [1472, 3, 0.0007626820845032627, \n 0.03813410422516314, 2.22, 61.69, 0.004502], [1473, 3, \n 0.0005323801851315335, 0.026619009256576683, 2.22, 61.69, 0.004502], [\n 1474, 3, 8.905977123682595e-05, 0.004452988561841298, 2.22, 61.69, \n 0.004502], [1475, 3, 2.4884191103347185e-05, 0.0012442095551673594, \n 2.22, 61.69, 0.004502], [1476, 2, 0.015946059282369706, \n 0.7973029641184852, 0, 0, 0], [1477, 3, 0.0007376196482685025, \n 0.03688098241342513, 2.22, 61.69, 0.004502], [1482, 3, \n 0.0004523453643744782, 0.02261726821872391, 2.22, 61.69, 0.004502], [\n 1483, 3, 0.0002291607516312977, 0.011458037581564884, 2.22, 61.69, \n 0.004502], [1484, 3, 1.9041073525508303e-06, 9.520536762754152e-05, \n 2.22, 61.69, 0.004502], [1485, 3, 3.5876538426778735e-05, \n 0.0017938269213389369, 2.22, 61.69, 0.004502], [1486, 3, \n 0.00018457774197472868, 0.009228887098736434, 2.22, 61.69, 0.004502], [\n 1489, 3, 7.571817467557017e-06, 0.00037859087337785094, 2.22, 61.69, \n 0.004502], [1490, 2, 0.04981318633597547, 2.4906593167987734, 0, 0, 0],\n [1491, 2, 0.0030920025676765685, 0.15460012838382842, 0, 0, 0], [1492, \n 2, 0.007177601132582883, 0.35888005662914413, 0, 0, 0], [1493, 2, \n 0.0027270697137500854, 0.13635348568750427, 0, 0, 0], [1494, 2, \n 0.016524815200932762, 0.8262407600466383, 0, 0, 0], [1495, 2, \n 0.002876525951460364, 0.1438262975730182, 0, 0, 0], [1500, 3, \n 5.0840365097147265e-06, 0.0002542018254857363, 2.22, 61.69, 0.004502],\n [1501, 3, 0.00020699592758045344, 0.010349796379022674, 2.22, 61.69, \n 0.004502], [1503, 3, 0.001165185399438024, 0.058259269971901194, 2.22, \n 61.69, 0.004502], [1504, 2, 0.004794675188738244, 0.23973375943691222, \n 0, 0, 0], [1512, 2, 0.001625501569891568, 0.08127507849457842, 0, 0, 0],\n [1513, 3, 0.0006523381548315222, 0.03261690774157611, 2.22, 61.69, \n 0.004502], [1518, 3, 
3.7867310041709484e-05, 0.0018933655020854743, \n 2.22, 61.69, 0.004502], [1519, 3, 2.6282109502563467e-06, \n 0.00013141054751281735, 2.22, 61.69, 0.004502]]'], {}), '([[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0], [589, 1, \n 0.010042676909098597, 0.5021338454549299, 0, 0, 0], [590, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [593, 1, \n 0.0017666198683200384, 0.08833099341600192, 0, 0, 0], [594, 1, \n 0.006047887837492023, 0.30239439187460115, 0, 0, 0], [595, 1, \n 1.50560576164933, 75.2802880824665, 0, 0, 0], [597, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [598, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [599, 1, \n 0.0029602819415092537, 0.1480140970754627, 0, 0, 0], [601, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [602, 1, \n 0.007830423200121252, 0.39152116000606263, 0, 0, 0], [603, 1, \n 1.0997606567649967, 54.98803283824984, 0, 0, 0], [607, 1, \n 0.5729577951308232, 28.64788975654116, 0, 0, 0], [608, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [609, 1, \n 0.0057932399285449895, 0.2896619964272495, 0, 0, 0], [610, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [612, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [613, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [614, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [616, 1, \n 0.0046154933496649645, 0.23077466748324824, 0, 0, 0], [617, 1, \n 0.04360845440717932, 2.1804227203589663, 0, 0, 0], [618, 1, \n 0.010631550198538607, 0.5315775099269304, 0, 0, 0], [619, 1, \n 0.037560566569687294, 1.8780283284843649, 0, 0, 0], [621, 1, \n 0.24350706293059987, 12.175353146529993, 0, 0, 0], [623, 1, \n 0.2419155134996809, 12.095775674984045, 0, 0, 0], [624, 1, \n 0.004297183463481174, 0.21485917317405873, 0, 0, 0], [628, 1, \n 0.14292113889652203, 7.1460569448261015, 0, 0, 0], [629, 1, \n 0.023968734429639437, 1.198436721481972, 0, 0, 0], [631, 1, \n 0.025401128917466494, 1.2700564458733248, 0, 0, 0], [632, 1, \n 0.01435577586688896, 0.717788793344448, 0, 0, 0], [637, 1, \n 0.017093240888069558, 0.854662044403478, 0, 0, 0], [638, 1, \n 0.02048324117592693, 1.0241620587963465, 0, 0, 0], [639, 1, \n 0.005029296201703893, 0.25146481008519467, 0, 0, 0], [640, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [641, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [642, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [643, 1, \n 0.27279157245950864, 13.639578622975431, 0, 0, 0], [646, 1, \n 0.03278591827693044, 1.6392959138465222, 0, 0, 0], [647, 1, \n 0.00445633840657307, 0.2228169203286535, 0, 0, 0], [650, 1, \n 0.4216014442504307, 21.080072212521536, 0, 0, 0], [652, 1, \n 0.00746436683100989, 0.37321834155049455, 0, 0, 0], [655, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [657, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [658, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [661, 1, \n 0.010408733278209955, 0.5204366639104978, 0, 0, 0], [662, 1, \n 0.002928450952890874, 0.1464225476445437, 0, 0, 0], [663, 1, \n 0.00238732414637843, 0.1193662073189215, 0, 0, 0], [666, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [668, 1, \n 0.24382537281678363, 12.191268640839182, 0, 0, 0], [670, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [672, 1, \n 0.010536057232683471, 0.5268028616341736, 0, 0, 0], [676, 1, \n 0.11777465788800255, 5.888732894400127, 0, 0, 0], [677, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [678, 1, \n 
0.3237211542489151, 16.186057712445756, 0, 0, 0], [679, 1, \n 0.2212253708977345, 11.061268544886726, 0, 0, 0], [681, 1, \n 0.0063821132179850025, 0.31910566089925013, 0, 0, 0], [683, 1, \n 0.008753521870054244, 0.4376760935027122, 0, 0, 0], [687, 1, \n 0.42303383873825773, 21.151691936912886, 0, 0, 0], [689, 1, \n 0.09867606471697511, 4.933803235848756, 0, 0, 0], [691, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [693, 1, \n 0.06175211791965539, 3.0876058959827692, 0, 0, 0], [694, 1, \n 0.005220282133414166, 0.2610141066707083, 0, 0, 0], [695, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [696, 1, \n 0.22950142793851305, 11.475071396925653, 0, 0, 0], [697, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [698, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [699, 1, \n 0.033295214094824506, 1.6647607047412254, 0, 0, 0], [700, 1, \n 0.008594366926962348, 0.42971834634811745, 0, 0, 0], [701, 1, \n 0.015024226627874922, 0.7512113313937461, 0, 0, 0], [702, 1, \n 0.023363945645890238, 1.168197282294512, 0, 0, 0], [704, 1, \n 0.16170142218136566, 8.085071109068283, 0, 0, 0], [705, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [707, 1, \n 0.010822536130248884, 0.5411268065124443, 0, 0, 0], [708, 1, \n 0.0024828171122335675, 0.12414085561167837, 0, 0, 0], [711, 1, \n 0.056054370956965534, 2.802718547848277, 0, 0, 0], [713, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [714, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [716, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [717, 1, \n 0.0017507043740108488, 0.08753521870054244, 0, 0, 0], [719, 1, \n 0.623250757147862, 31.162537857393104, 0, 0, 0], [721, 1, \n 0.0012732395447351628, 0.06366197723675814, 0, 0, 0], [722, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [723, 1, \n 0.006270704757820675, 0.31353523789103377, 0, 0, 0], [724, 1, \n 0.0019257748114119334, 0.09628874057059668, 0, 0, 0], [725, 1, \n 0.25464790894703254, 12.732395447351628, 0, 0, 0], [726, 1, \n 0.040107045659157625, 2.0053522829578814, 0, 0, 0], [727, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [728, 1, \n 0.16233804195373325, 8.116902097686662, 0, 0, 0], [730, 1, \n 0.10077690996578814, 5.038845498289407, 0, 0, 0], [731, 1, \n 0.2848873481344926, 14.244367406724633, 0, 0, 0], [732, 1, \n 0.004647324338283344, 0.2323662169141672, 0, 0, 0], [733, 1, \n 0.12624170086049138, 6.312085043024569, 0, 0, 0], [735, 1, \n 0.013496339174192726, 0.6748169587096363, 0, 0, 0], [736, 1, \n 0.010185916357881302, 0.5092958178940651, 0, 0, 0], [737, 1, \n 0.00891267681314614, 0.445633840657307, 0, 0, 0], [738, 1, \n 0.04408591923645501, 2.2042959618227504, 0, 0, 0], [739, 1, \n 0.01906676218240906, 0.9533381091204531, 0, 0, 0], [741, 1, \n 0.0340591578216656, 1.7029578910832803, 0, 0, 0], [742, 1, \n 0.0028647889756541157, 0.14323944878270578, 0, 0, 0], [743, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [745, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [746, 1, \n 0.03183098861837907, 1.5915494309189535, 0, 0, 0], [747, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [748, 1, \n 0.03501408748021698, 1.7507043740108488, 0, 0, 0], [749, 1, \n 0.0025464790894703256, 0.12732395447351627, 0, 0, 0], [750, 1, \n 0.028902537665488188, 1.4451268832744095, 0, 0, 0], [758, 1, \n 0.0058887328944001276, 0.2944366447200064, 0, 0, 0], [760, 1, \n 0.2527380496299298, 12.636902481496492, 0, 0, 0], [761, 1, \n 0.004997465213085514, 0.2498732606542757, 0, 
0, 0], [762, 1, 0.3517324242330887, 17.586621211654435, 0, 0, 0], ...,
[1518, 3, 3.7867310041709484e-05, 0.0018933655020854743, 2.22, 61.69, 0.004502],
[1519, 3, 2.6282109502563467e-06, 0.00013141054751281735, 2.22, 61.69, 0.004502]])\n',
(307797, 354704), False, 'from numpy import array\n'),
((355319, 378460), 'numpy.array', 'array',
(['[[586, 1, 0], [589, 108, 0], [590, 108, 0], ..., [1518, 584, 0], [1519, 585, 0], ..., [452, 585, 0]]'], {}),
'([[586, 1, 0], [589, 108, 0], [590, 108, 0], ..., [1453, 518, 0], [1454, 519, 0], [1455, 
520, 0], [1456, 521, 0],\n [1457, 522, 0], [1458, 523, 0], [1459, 524, 0], [1460, 525, 0], [1461, \n 526, 0], [1462, 527, 0], [1463, 528, 0], [1464, 529, 0], [1465, 530, 0],\n [1466, 531, 0], [1467, 532, 0], [1468, 533, 0], [1469, 534, 0], [1470, \n 535, 0], [1471, 536, 0], [1472, 537, 0], [1473, 538, 0], [1474, 539, 0],\n [1475, 540, 0], [1476, 541, 0], [1477, 542, 0], [1482, 547, 0], [1483, \n 548, 0], [1484, 549, 0], [1485, 550, 0], [1486, 551, 0], [1489, 555, 0],\n [1490, 556, 0], [1491, 557, 0], [1492, 558, 0], [1493, 559, 0], [1494, \n 560, 0], [1495, 561, 0], [1500, 566, 0], [1501, 567, 0], [1503, 569, 0],\n [1504, 570, 0], [1512, 578, 0], [1513, 579, 0], [1518, 584, 0], [1519, \n 585, 0], [1, 490, 0], [3, 4, 1], [491, 6, 0], [7, 5, 0], [8, 9, 0], [\n 492, 11, 0], [11, 493, 0], [492, 493, 1], [494, 14, 0], [13, 15, 0], [\n 16, 5, 0], [17, 18, 1], [17, 12, 0], [14, 495, 0], [494, 19, 0], [20, \n 21, 0], [20, 22, 1], [497, 23, 0], [23, 499, 1], [25, 26, 0], [25, 22, \n 0], [23, 27, 0], [28, 23, 0], [8, 21, 0], [9, 29, 0], [30, 25, 1], [31,\n 32, 1], [32, 33, 1], [34, 35, 0], [35, 36, 0], [490, 6, 1], [37, 10, 1],\n [10, 38, 0], [37, 38, 1], [39, 40, 1], [39, 41, 1], [42, 41, 1], [18, \n 42, 1], [492, 43, 1], [44, 45, 0], [44, 505, 0], [46, 12, 0], [47, 48, \n 0], [49, 50, 0], [31, 33, 1], [31, 51, 0], [52, 53, 1], [52, 54, 0], [\n 506, 55, 0], [506, 507, 1], [57, 506, 0], [57, 58, 0], [58, 506, 0], [\n 59, 60, 1], [508, 62, 0], [30, 61, 1], [63, 506, 0], [13, 64, 0], [65, \n 66, 1], [59, 67, 0], [61, 67, 0], [68, 69, 1], [70, 69, 1], [71, 72, 1],\n [73, 74, 1], [37, 75, 1], [72, 75, 0], [37, 72, 1], [76, 77, 1], [77, \n 51, 0], [73, 72, 1], [18, 40, 1], [492, 45, 1], [10, 74, 1], [45, 511, \n 1], [78, 32, 1], [79, 80, 0], [81, 79, 1], [34, 82, 0], [83, 84, 0], [\n 83, 499, 0], [85, 86, 0], [87, 86, 1], [88, 89, 0], [90, 86, 1], [91, \n 86, 0], [86, 92, 0], [86, 93, 0], [94, 86, 1], [86, 95, 1], [513, 517, \n 0], [97, 66, 1], [42, 98, 0], [99, 100, 1], [42, 101, 0], [102, 42, 1],\n [103, 87, 0], [104, 103, 0], [105, 87, 0], [106, 107, 0], [108, 107, 0],\n [109, 106, 0], [110, 111, 1], [87, 112, 0], [113, 87, 0], [87, 85, 1],\n [110, 114, 1], [115, 116, 0], [117, 118, 0], [117, 119, 0], [117, 120, \n 1], [121, 122, 0], [123, 124, 0], [125, 126, 0], [127, 119, 0], [118, \n 128, 0], [121, 119, 0], [530, 527, 0], [125, 130, 0], [125, 123, 0], [\n 131, 132, 0], [133, 123, 0], [524, 134, 0], [135, 136, 0], [123, 131, 0\n ], [117, 128, 1], [137, 521, 0], [531, 514, 0], [139, 521, 0], [140, \n 514, 0], [522, 141, 0], [142, 523, 0], [530, 526, 0], [140, 532, 0], [\n 142, 144, 0], [140, 522, 0], [145, 146, 0], [147, 523, 0], [144, 523, 0\n ], [139, 523, 0], [140, 141, 0], [528, 526, 0], [528, 148, 0], [149, \n 150, 0], [145, 528, 0], [530, 151, 0], [524, 152, 0], [149, 525, 1], [\n 139, 514, 0], [126, 120, 1], [530, 153, 0], [528, 147, 1], [528, 154, 0\n ], [130, 120, 1], [528, 155, 1], [524, 533, 0], [524, 149, 0], [154, \n 150, 0], [157, 110, 1], [119, 158, 0], [159, 60, 0], [536, 161, 0], [\n 115, 151, 0], [162, 134, 0], [115, 526, 0], [138, 87, 0], [123, 163, 0],\n [112, 164, 0], [112, 165, 0], [166, 165, 0], [167, 537, 0], [168, 104, \n 0], [531, 520, 0], [139, 520, 0], [520, 169, 0], [168, 105, 0], [520, \n 170, 0], [171, 89, 0], [521, 172, 0], [123, 173, 0], [521, 174, 0], [37,\n 39, 0], [530, 175, 0], [530, 176, 0], [88, 530, 0], [177, 496, 1], [178,\n 525, 0], [179, 493, 1], [180, 181, 1], [182, 180, 0], [179, 181, 0], [\n 180, 493, 1], [183, 30, 0], [183, 21, 0], [538, 185, 0], [538, 
89, 0],\n [184, 186, 0], [184, 187, 0], [520, 172, 0], [89, 175, 0], [185, 89, 0],\n [89, 188, 0], [189, 190, 0], [539, 172, 0], [504, 192, 0], [105, 186, 0\n ], [105, 187, 0], [539, 193, 0], [187, 194, 0], [539, 540, 0], [539, \n 196, 0], [197, 540, 0], [110, 198, 0], [197, 539, 0], [199, 537, 0], [\n 134, 526, 0], [200, 193, 0], [4, 201, 1], [202, 86, 0], [85, 203, 0], [\n 147, 204, 0], [147, 205, 0], [123, 206, 0], [537, 207, 0], [165, 208, 0\n ], [4, 94, 1], [4, 2, 0], [209, 4, 0], [119, 163, 0], [210, 3, 0], [99,\n 211, 0], [99, 69, 1], [212, 99, 0], [213, 214, 0], [510, 215, 0], [128,\n 69, 1], [216, 69, 1], [217, 98, 0], [504, 218, 0], [177, 504, 1], [219,\n 209, 0], [219, 220, 0], [94, 95, 1], [159, 221, 1], [34, 161, 0], [222,\n 221, 0], [211, 52, 1], [215, 223, 1], [224, 215, 0], [225, 224, 1], [\n 224, 223, 0], [226, 6, 0], [7, 3, 1], [216, 227, 1], [228, 229, 0], [\n 227, 230, 0], [231, 53, 1], [544, 545, 0], [234, 235, 1], [546, 214, 1],\n [233, 227, 0], [237, 238, 0], [212, 100, 0], [519, 239, 0], [238, 519, \n 0], [213, 240, 0], [241, 242, 1], [70, 241, 0], [509, 213, 0], [68, 243,\n 0], [243, 244, 0], [68, 244, 0], [544, 547, 1], [245, 227, 1], [246, \n 208, 0], [112, 208, 0], [165, 247, 0], [537, 549, 0], [537, 550, 0], [\n 537, 551, 0], [110, 251, 0], [510, 252, 1], [529, 253, 1], [237, 239, 1\n ], [254, 238, 1], [69, 255, 0], [510, 225, 1], [256, 257, 0], [258, 190,\n 0], [258, 259, 0], [260, 261, 1], [554, 553, 1], [515, 263, 0], [14, \n 264, 1], [116, 555, 0], [151, 116, 0], [111, 114, 1], [77, 111, 0], [\n 266, 525, 0], [267, 120, 1], [268, 269, 0], [556, 271, 0], [556, 272, 0\n ], [529, 273, 0], [128, 274, 0], [34, 275, 0], [503, 276, 0], [503, 504,\n 1], [177, 218, 1], [277, 278, 1], [557, 558, 1], [557, 559, 1], [559, \n 558, 1], [277, 78, 1], [277, 279, 1], [78, 279, 0], [281, 282, 0], [283,\n 161, 1], [268, 161, 1], [256, 284, 0], [515, 516, 1], [263, 516, 0], [\n 516, 285, 0], [63, 286, 0], [287, 516, 0], [8, 102, 1], [8, 101, 1], [\n 80, 288, 0], [80, 289, 0], [276, 560, 0], [37, 290, 0], [290, 74, 1], [\n 512, 291, 0], [78, 292, 1], [199, 548, 0], [491, 293, 0], [4, 294, 0],\n [490, 541, 1], [491, 295, 0], [491, 296, 0], [295, 297, 0], [508, 161, \n 0], [117, 123, 0], [133, 117, 0], [71, 74, 1], [74, 278, 1], [298, 515,\n 0], [5, 299, 0], [32, 292, 1], [5, 29, 1], [503, 560, 0], [300, 301, 1],\n [51, 300, 0], [244, 302, 1], [31, 302, 1], [51, 282, 1], [303, 304, 0],\n [305, 304, 0], [305, 259, 0], [306, 307, 1], [305, 308, 0], [305, 309, \n 0], [310, 309, 1], [306, 309, 1], [311, 280, 0], [280, 278, 1], [311, \n 32, 1], [13, 312, 1], [313, 314, 0], [312, 313, 1], [547, 566, 1], [245,\n 315, 1], [312, 316, 0], [312, 314, 0], [554, 546, 1], [262, 216, 1], [\n 317, 233, 0], [318, 317, 0], [231, 52, 1], [319, 567, 0], [557, 321, 0],\n [277, 65, 1], [322, 288, 1], [322, 323, 0], [277, 324, 1], [324, 325, 0\n ], [277, 325, 0], [326, 327, 0], [328, 326, 1], [328, 327, 1], [326, \n 329, 0], [568, 329, 1], [568, 326, 0], [332, 78, 1], [333, 306, 0], [\n 332, 333, 0], [332, 334, 0], [66, 334, 1], [330, 335, 1], [336, 66, 0],\n [330, 336, 1], [68, 70, 0], [509, 337, 1], [324, 288, 0], [338, 559, 0],\n [339, 559, 0], [339, 340, 1], [559, 340, 1], [341, 292, 0], [557, 342, \n 0], [558, 343, 0], [502, 340, 1], [72, 32, 1], [344, 345, 0], [346, 47,\n 0], [46, 47, 0], [346, 345, 0], [347, 328, 0], [347, 348, 1], [571, 348,\n 1], [347, 572, 0], [571, 570, 1], [14, 350, 0], [350, 573, 0], [15, 351,\n 1], [352, 15, 0], [15, 335, 1], [232, 227, 0], [565, 544, 1], [235, 567,\n 1], 
[567, 286, 0], [353, 519, 0], [354, 353, 0], [355, 354, 0], [354, \n 356, 0], [357, 358, 0], [574, 359, 0], [235, 575, 0], [167, 361, 0], [\n 528, 362, 0], [363, 344, 0], [259, 364, 1], [54, 56, 0], [365, 364, 0],\n [231, 366, 0], [30, 367, 0], [61, 367, 1], [254, 368, 0], [254, 369, 0],\n [254, 370, 0], [99, 358, 0], [354, 519, 0], [571, 371, 0], [207, 372, 0\n ], [57, 373, 0], [209, 374, 0], [375, 376, 0], [376, 377, 0], [16, 49, \n 0], [318, 377, 0], [378, 297, 0], [562, 379, 0], [576, 563, 0], [576, \n 381, 0], [577, 576, 1], [244, 383, 0], [244, 306, 1], [383, 306, 1], [\n 380, 306, 0], [252, 225, 0], [220, 76, 0], [542, 384, 0], [385, 384, 0],\n [542, 385, 0], [386, 385, 0], [387, 578, 0], [332, 388, 1], [382, 332, \n 1], [382, 388, 0], [579, 578, 0], [577, 387, 1], [144, 390, 0], [37, 49,\n 0], [391, 233, 0], [392, 310, 0], [260, 393, 0], [394, 230, 0], [395, \n 282, 1], [395, 244, 0], [25, 396, 1], [81, 74, 0], [278, 80, 1], [81, \n 278, 1], [569, 570, 0], [397, 552, 0], [542, 398, 0], [398, 385, 0], [\n 399, 499, 0], [83, 399, 0], [498, 400, 0], [518, 239, 1], [575, 543, 0],\n [401, 360, 0], [580, 581, 0], [401, 402, 0], [403, 231, 0], [189, 360, \n 1], [234, 404, 0], [235, 404, 1], [235, 580, 0], [216, 259, 0], [405, \n 259, 0], [405, 318, 0], [406, 230, 0], [542, 407, 0], [23, 408, 0], [\n 577, 348, 0], [562, 564, 1], [582, 507, 0], [27, 410, 0], [501, 27, 0],\n [27, 411, 0], [411, 410, 0], [403, 360, 0], [412, 360, 0], [326, 413, 0\n ], [414, 413, 0], [6, 297, 0], [554, 580, 1], [262, 401, 1], [499, 556,\n 1], [224, 229, 0], [583, 507, 0], [415, 307, 0], [416, 507, 0], [284, \n 561, 0], [543, 417, 0], [418, 506, 0], [220, 157, 0], [295, 419, 0], [\n 295, 420, 0], [541, 62, 0], [52, 421, 0], [60, 160, 0], [535, 161, 0],\n [267, 282, 0], [52, 365, 0], [28, 27, 0], [30, 201, 1], [422, 81, 0], [\n 119, 425, 0], [423, 425, 0], [424, 425, 0], [426, 428, 0], [427, 428, 0\n ], [19, 428, 1], [45, 429, 0], [44, 429, 0], [505, 429, 0], [231, 431, \n 1], [190, 431, 1], [430, 431, 0], [286, 433, 0], [432, 433, 0], [506, \n 433, 0], [23, 434, 0], [400, 434, 0], [500, 434, 0], [32, 436, 0], [435,\n 436, 0], [78, 436, 1], [86, 438, 1], [437, 438, 0], [221, 438, 0], [207,\n 439, 0], [516, 439, 0], [513, 439, 0], [181, 441, 1], [440, 441, 0], [\n 504, 441, 1], [135, 442, 0], [109, 442, 0], [112, 442, 0], [113, 443, 0\n ], [132, 443, 0], [107, 443, 0], [444, 445, 0], [112, 445, 0], [109, \n 445, 0], [119, 447, 1], [100, 447, 1], [446, 447, 0], [124, 448, 0], [\n 125, 448, 0], [131, 448, 0], [449, 450, 0], [173, 450, 0], [184, 450, 0\n ], [144, 451, 0], [140, 451, 0], [514, 451, 0], [537, 585, 1], [141, \n 585, 0], [584, 585, 0], [522, 454, 0], [144, 454, 0], [453, 454, 0], [\n 199, 456, 0], [140, 456, 0], [455, 456, 0], [537, 456, 0], [538, 457, 0\n ], [153, 457, 0], [176, 457, 0], [524, 459, 0], [458, 459, 0], [134, \n 459, 0], [460, 461, 0], [150, 461, 0], [149, 461, 0], [521, 463, 0], [\n 462, 463, 0], [538, 463, 0], [110, 464, 0], [90, 464, 0], [165, 464, 0],\n [458, 465, 0], [134, 465, 0], [524, 465, 0], [466, 467, 0], [110, 467, \n 0], [165, 467, 0], [468, 469, 0], [541, 469, 0], [490, 469, 0], [263, \n 471, 0], [470, 471, 0], [534, 471, 0], [136, 472, 0], [110, 472, 0], [\n 251, 472, 0], [226, 474, 0], [473, 474, 0], [257, 474, 0], [6, 474, 1],\n [299, 475, 1], [3, 475, 0], [210, 475, 0], [297, 476, 0], [296, 476, 0],\n [295, 476, 0], [313, 478, 1], [477, 478, 0], [245, 478, 0], [479, 481, \n 0], [565, 481, 0], [480, 481, 0], [415, 482, 0], [56, 482, 0], [409, \n 482, 0], [483, 484, 0], 
[3, 484, 0], [301, 484, 0], [233, 485, 0], [392,\n 485, 0], [391, 485, 0], [579, 488, 0], [486, 488, 0], [487, 488, 0], [\n 270, 489, 0], [331, 489, 0], [396, 489, 1], [519, 253, 0], [382, 349, 1\n ], [349, 351, 0], [459, 465, 0], [549, 550, 0], [550, 551, 0], [194, \n 195, 0], [247, 248, 0], [2, 294, 0], [549, 551, 0], [54, 365, 0], [131,\n 265, 0], [91, 92, 0], [247, 249, 0], [186, 191, 0], [129, 173, 0], [96,\n 202, 0], [53, 320, 0], [24, 396, 0], [133, 156, 0], [442, 452, 0], [445,\n 452, 0], [247, 250, 0], [187, 195, 0], [216, 236, 0], [244, 389, 0], [\n 394, 406, 0], [442, 445, 0], [442, 444, 0], [198, 472, 0], [464, 467, 0\n ], [198, 251, 0], [112, 143, 0], [2, 490, 0], [5, 491, 0], [10, 492, 0],\n [12, 493, 0], [13, 494, 0], [15, 495, 0], [18, 496, 0], [20, 497, 0], [\n 22, 498, 0], [24, 499, 0], [26, 500, 0], [30, 501, 0], [32, 502, 0], [\n 37, 503, 0], [42, 504, 0], [46, 505, 0], [52, 506, 0], [56, 507, 0], [\n 61, 508, 0], [68, 509, 0], [69, 510, 0], [74, 511, 0], [78, 512, 0], [\n 86, 513, 0], [87, 514, 0], [94, 515, 0], [95, 516, 0], [96, 517, 0], [\n 99, 518, 0], [100, 519, 0], [104, 520, 0], [105, 521, 0], [106, 522, 0],\n [107, 523, 0], [117, 524, 0], [120, 525, 0], [123, 526, 0], [124, 527, \n 0], [125, 528, 0], [128, 529, 0], [129, 530, 0], [138, 531, 0], [143, \n 532, 0], [156, 533, 0], [157, 534, 0], [159, 535, 0], [160, 536, 0], [\n 165, 537, 0], [184, 538, 0], [191, 539, 0], [195, 540, 0], [201, 541, 0\n ], [220, 542, 0], [231, 543, 0], [232, 544, 0], [233, 545, 0], [236, \n 546, 0], [245, 547, 0], [246, 548, 0], [248, 549, 0], [249, 550, 0], [\n 250, 551, 0], [259, 552, 0], [261, 553, 0], [262, 554, 0], [265, 555, 0\n ], [270, 556, 0], [277, 557, 0], [279, 558, 0], [280, 559, 0], [290, \n 560, 0], [301, 561, 0], [305, 562, 0], [306, 563, 0], [310, 564, 0], [\n 313, 565, 0], [315, 566, 0], [320, 567, 0], [330, 568, 0], [332, 569, 0\n ], [334, 570, 0], [336, 571, 0], [349, 572, 0], [351, 573, 0], [358, \n 574, 0], [360, 575, 0], [380, 576, 0], [382, 577, 0], [383, 578, 0], [\n 389, 579, 0], [401, 580, 0], [402, 581, 0], [409, 582, 0], [415, 583, 0\n ], [444, 584, 0], [452, 585, 0]])\n', (355324, 378460), False, 'from numpy import array\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 12 14:20:34 2020
@author: sixingliu, yaruchen
"""
import pandas as pd
import numpy as np
#from pandas.core.indexes.numeric import (NumericIndex, Float64Index, # noqa
# Int64Index, UInt64Index)
#from pandas.core.indexes.datetimes import DatetimeIndex
class BaseSensorSeries(pd.Series):
@property
def _constructor(self):
return BaseSensorSeries
@property
def _constructor_expanddim(self):
return BaseSensorFrame
@property
def time(self):
return self.index
@time.setter
def time(self, time):
        # TODO: add index validation function
time = np.array(time)
self.index = time
@property
def sigs(self):
return self.values
@sigs.setter
def sigs(self, sigs):
        # TODO: add signal validation function
        sigs = np.array(sigs)
        self.iloc[:] = sigs  # Series.values is read-only in pandas, so assign through iloc
_frequency = None
@property
def frequency(self):
return self._frequency
@frequency.setter
def frequency(self, frequency):
self._frequency = frequency
class BaseSensorFrame(pd.DataFrame):
@property
def _constructor(self):
return BaseSensorFrame
@property
def _constructor_sliced(self):
return BaseSensorSeries
@property
def time(self):
return self.index
@time.setter
def time(self, time):
        # TODO: add index validation function
time = np.array(time)
self.index = time
@property
def sigs(self):
return self.values
@sigs.setter
def sigs(self, sigs):
        # TODO: add signal validation function
        sigs = np.array(sigs)
        self.iloc[:] = sigs  # DataFrame.values is read-only in pandas, so assign through iloc
_frequency = None
@property
def frequency(self):
return self._frequency
@frequency.setter
def frequency(self, frequency):
self._frequency = frequency
@pd.api.extensions.register_series_accessor("freq")
@pd.api.extensions.register_dataframe_accessor("freq")
class FreqAccessor:
def __init__(self, pandas_obj):
self._validate(pandas_obj)
self._obj = pandas_obj
@staticmethod
def _validate(obj):
pass
@property
def frequency(self):
return self._obj.frequency
@frequency.setter
def frequency(self, frequency):
self._obj.frequency = frequency
def to_freqindex(self, frequency=None):
        if frequency is None:
frequency = self.frequency
self._obj.index = self._obj.index / frequency
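# --- Hypothetical usage sketch (not part of the original module) ---
# Minimal illustration of the classes above, assuming a 100 Hz signal; the
# registered `freq` accessor then rescales the integer sample index to seconds.
if __name__ == '__main__':
    s = BaseSensorSeries(np.arange(10, dtype=float))
    s.frequency = 100.0       # sampling frequency in Hz (assumed)
    s.freq.to_freqindex()     # index becomes 0.00, 0.01, ..., 0.09 s
    print(s.index)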
|
[
"pandas.api.extensions.register_dataframe_accessor",
"pandas.api.extensions.register_series_accessor",
"numpy.array"
] |
[((2011, 2061), 'pandas.api.extensions.register_series_accessor', 'pd.api.extensions.register_series_accessor', (['"""freq"""'], {}), "('freq')\n", (2053, 2061), True, 'import pandas as pd\n'), ((2063, 2116), 'pandas.api.extensions.register_dataframe_accessor', 'pd.api.extensions.register_dataframe_accessor', (['"""freq"""'], {}), "('freq')\n", (2108, 2116), True, 'import pandas as pd\n'), ((708, 722), 'numpy.array', 'np.array', (['time'], {}), '(time)\n', (716, 722), True, 'import numpy as np\n'), ((928, 942), 'numpy.array', 'np.array', (['sigs'], {}), '(sigs)\n', (936, 942), True, 'import numpy as np\n'), ((1540, 1554), 'numpy.array', 'np.array', (['time'], {}), '(time)\n', (1548, 1554), True, 'import numpy as np\n'), ((1760, 1774), 'numpy.array', 'np.array', (['sigs'], {}), '(sigs)\n', (1768, 1774), True, 'import numpy as np\n')]
|
import smtplib
import datetime
import numpy as np
import pandas as pd
from email.mime.text import MIMEText
from yahoo_fin import stock_info as si
from pandas_datareader import DataReader
from email.mime.multipart import MIMEMultipart
from bs4 import BeautifulSoup
from urllib.request import urlopen, Request
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from time import sleep
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import talib
# Define list of stocks
stock_list = ['AAPL', 'MSFT', 'AMZN']
# for the tradingview recommendation
# options are: '1m', '5m', '15m', '1h', '4h', '1D', '1W', '1M'
interval = "1M"
# Chromedriver Path
path = '/Users/shashank/Documents/Code/Python/Finance/chromedriver.exe'
# Chromedriver Options
options = Options()
options.add_argument("--headless")
webdriver = webdriver.Chrome(executable_path=path, options=options)
# Define start and end dates
start = datetime.datetime.now() - datetime.timedelta(days=365)
end = datetime.datetime.now()
def sendMessage(text):
message = text
email = ""
pas = ""
sms_gateway = ''
smtp = "smtp.gmail.com"
port = 587
server = smtplib.SMTP(smtp,port)
server.starttls()
server.login(email,pas)
msg = MIMEMultipart()
msg['From'] = email
msg['To'] = sms_gateway
msg['Subject'] = "Stock Data\n"
body = "{}\n".format(message)
msg.attach(MIMEText(body, 'plain'))
sms = msg.as_string()
server.sendmail(email,sms_gateway,sms)
server.quit()
print ('done')
def getData(list_of_stocks):
for stock in list_of_stocks:
df = DataReader(stock, 'yahoo', start, end)
print (stock)
# Current Price
price = si.get_live_price('{}'.format(stock))
price = round(price, 2)
        # Sharpe Ratio (single-stock portfolio with a notional $5,000 allocation)
        x = 5000
        y = x
        stock_df = df
        stock_df['Norm return'] = stock_df['Adj Close'] / stock_df.iloc[0]['Adj Close']
        allocation = float(x / y)
        stock_df['Allocation'] = stock_df['Norm return'] * allocation
        stock_df['Position'] = stock_df['Allocation'] * x
        pos = [df['Position']]
        val = pd.concat(pos, axis=1)
        val.columns = ['{} Pos'.format(stock)]
        val['Total Pos'] = val.sum(axis=1)
val['Daily Return'] = val['Total Pos'].pct_change(1)
Sharpe_Ratio = val['Daily Return'].mean() / val['Daily Return'].std()
A_Sharpe_Ratio = (252**0.5) * Sharpe_Ratio
A_Sharpe_Ratio = round(A_Sharpe_Ratio, 2)
# News Sentiment
finwiz_url = 'https://finviz.com/quote.ashx?t='
news_tables = {}
url = finwiz_url + stock
req = Request(url=url,headers={'user-agent': 'my-app/0.0.1'})
response = urlopen(req)
html = BeautifulSoup(response, features="lxml")
news_table = html.find(id='news-table')
news_tables[stock] = news_table
parsed_news = []
# Iterate through the news
for file_name, news_table in news_tables.items():
for x in news_table.findAll('tr'):
text = x.a.get_text()
date_scrape = x.td.text.split()
if len(date_scrape) == 1:
time = date_scrape[0]
else:
date = date_scrape[0]
time = date_scrape[1]
ticker = file_name.split('_')[0]
parsed_news.append([ticker, date, time, text])
vader = SentimentIntensityAnalyzer()
columns = ['ticker', 'date', 'time', 'headline']
dataframe = pd.DataFrame(parsed_news, columns=columns)
scores = dataframe['headline'].apply(vader.polarity_scores).tolist()
scores_df = pd.DataFrame(scores)
dataframe = dataframe.join(scores_df, rsuffix='_right')
dataframe['date'] = pd.to_datetime(dataframe.date).dt.date
dataframe = dataframe.set_index('ticker')
sentiment = round(dataframe['compound'].mean(), 2)
# TradingView Recommendation
try:
#Declare variable
analysis = []
#Open tradingview's site
webdriver.get("https://s.tradingview.com/embed-widget/technical-analysis/?locale=en#%7B%22interval%22%3A%22{}%22%2C%22width%22%3A%22100%25%22%2C%22isTransparent%22%3Afalse%2C%22height%22%3A%22100%25%22%2C%22symbol%22%3A%22{}%22%2C%22showIntervalTabs%22%3Atrue%2C%22colorTheme%22%3A%22dark%22%2C%22utm_medium%22%3A%22widget_new%22%2C%22utm_campaign%22%3A%22technical-analysis%22%7D".format(interval, ticker))
webdriver.refresh()
#Wait for site to load elements
while len(webdriver.find_elements_by_class_name("speedometerSignal-pyzN--tL")) == 0:
sleep(0.1)
#Recommendation
recommendation_element = webdriver.find_element_by_class_name("speedometerSignal-pyzN--tL")
analysis.append(recommendation_element.get_attribute('innerHTML'))
#Counters
counter_elements = webdriver.find_elements_by_class_name("counterNumber-3l14ys0C")
#Sell
analysis.append(int(counter_elements[0].get_attribute('innerHTML')))
#Neutral
analysis.append(int(counter_elements[1].get_attribute('innerHTML')))
#Buy
analysis.append(int(counter_elements[2].get_attribute('innerHTML')))
signal = analysis[0]
        except Exception:
signal = 'None'
# Beta
df = DataReader(stock,'yahoo',start, end)
dfb = DataReader('^GSPC','yahoo',start, end)
rts = df.resample('M').last()
rbts = dfb.resample('M').last()
dfsm = pd.DataFrame({'s_adjclose' : rts['Adj Close'],
'b_adjclose' : rbts['Adj Close']},
index=rts.index)
dfsm[['s_returns','b_returns']] = dfsm[['s_adjclose','b_adjclose']]/\
dfsm[['s_adjclose','b_adjclose']].shift(1) -1
dfsm = dfsm.dropna()
covmat = np.cov(dfsm["s_returns"],dfsm["b_returns"])
beta = covmat[0,1]/covmat[1,1]
beta = round(beta, 2)
# Relative Strength Index
df["rsi"] = talib.RSI(df["Close"])
values = df["rsi"].tail(14)
value = values.mean()
rsi = round(value, 2)
output = ("\nTicker: " + str(stock) + "\nCurrent Price : " + str(price) + "\nSharpe Ratio: " + str(A_Sharpe_Ratio) + "\nNews Sentiment: " + str(sentiment) + "\nTradingView Rec for {}: ".format(interval) + str(signal) + "\nRelative Strength Index: " + str(rsi) + "\nBeta Value for 1 Year: " + str(beta))
sendMessage(output)
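# --- Hypothetical helper (not part of the original script) ---
# getData() above estimates beta as Cov(r_stock, r_benchmark) / Var(r_benchmark)
# from monthly returns; this standalone sketch isolates just that calculation.
# The function name and the synthetic inputs in the comment are illustrative only.
def _beta_from_returns(stock_returns, benchmark_returns):
    covmat = np.cov(stock_returns, benchmark_returns)   # 2x2 covariance matrix
    return covmat[0, 1] / covmat[1, 1]                  # Cov(s, b) / Var(b)
# e.g. _beta_from_returns(np.random.normal(0.01, 0.05, 60), np.random.normal(0.008, 0.04, 60))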
if __name__ == '__main__':
getData(stock_list)
|
[
"pandas_datareader.DataReader",
"urllib.request.Request",
"email.mime.text.MIMEText",
"selenium.webdriver.find_element_by_class_name",
"pandas.DataFrame",
"selenium.webdriver.find_elements_by_class_name",
"smtplib.SMTP",
"urllib.request.urlopen",
"email.mime.multipart.MIMEMultipart",
"datetime.timedelta",
"selenium.webdriver.refresh",
"numpy.cov",
"datetime.datetime.now",
"pandas.concat",
"nltk.sentiment.vader.SentimentIntensityAnalyzer",
"time.sleep",
"pandas.to_datetime",
"talib.RSI",
"bs4.BeautifulSoup",
"selenium.webdriver.chrome.options.Options",
"selenium.webdriver.Chrome"
] |
[((797, 806), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (804, 806), False, 'from selenium.webdriver.chrome.options import Options\n'), ((854, 909), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'path', 'options': 'options'}), '(executable_path=path, options=options)\n', (870, 909), False, 'from selenium import webdriver\n'), ((1009, 1032), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1030, 1032), False, 'import datetime\n'), ((948, 971), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (969, 971), False, 'import datetime\n'), ((974, 1002), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (992, 1002), False, 'import datetime\n'), ((1188, 1212), 'smtplib.SMTP', 'smtplib.SMTP', (['smtp', 'port'], {}), '(smtp, port)\n', (1200, 1212), False, 'import smtplib\n'), ((1273, 1288), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (1286, 1288), False, 'from email.mime.multipart import MIMEMultipart\n'), ((1426, 1449), 'email.mime.text.MIMEText', 'MIMEText', (['body', '"""plain"""'], {}), "(body, 'plain')\n", (1434, 1449), False, 'from email.mime.text import MIMEText\n'), ((1645, 1683), 'pandas_datareader.DataReader', 'DataReader', (['stock', '"""yahoo"""', 'start', 'end'], {}), "(stock, 'yahoo', start, end)\n", (1655, 1683), False, 'from pandas_datareader import DataReader\n'), ((2242, 2264), 'pandas.concat', 'pd.concat', (['pos'], {'axis': '(1)'}), '(pos, axis=1)\n', (2251, 2264), True, 'import pandas as pd\n'), ((2831, 2887), 'urllib.request.Request', 'Request', ([], {'url': 'url', 'headers': "{'user-agent': 'my-app/0.0.1'}"}), "(url=url, headers={'user-agent': 'my-app/0.0.1'})\n", (2838, 2887), False, 'from urllib.request import urlopen, Request\n'), ((2907, 2919), 'urllib.request.urlopen', 'urlopen', (['req'], {}), '(req)\n', (2914, 2919), False, 'from urllib.request import urlopen, Request\n'), ((2939, 2979), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response'], {'features': '"""lxml"""'}), "(response, features='lxml')\n", (2952, 2979), False, 'from bs4 import BeautifulSoup\n'), ((3729, 3757), 'nltk.sentiment.vader.SentimentIntensityAnalyzer', 'SentimentIntensityAnalyzer', ([], {}), '()\n', (3755, 3757), False, 'from nltk.sentiment.vader import SentimentIntensityAnalyzer\n'), ((3844, 3886), 'pandas.DataFrame', 'pd.DataFrame', (['parsed_news'], {'columns': 'columns'}), '(parsed_news, columns=columns)\n', (3856, 3886), True, 'import pandas as pd\n'), ((3993, 4013), 'pandas.DataFrame', 'pd.DataFrame', (['scores'], {}), '(scores)\n', (4005, 4013), True, 'import pandas as pd\n'), ((5878, 5916), 'pandas_datareader.DataReader', 'DataReader', (['stock', '"""yahoo"""', 'start', 'end'], {}), "(stock, 'yahoo', start, end)\n", (5888, 5916), False, 'from pandas_datareader import DataReader\n'), ((5929, 5969), 'pandas_datareader.DataReader', 'DataReader', (['"""^GSPC"""', '"""yahoo"""', 'start', 'end'], {}), "('^GSPC', 'yahoo', start, end)\n", (5939, 5969), False, 'from pandas_datareader import DataReader\n'), ((6070, 6171), 'pandas.DataFrame', 'pd.DataFrame', (["{'s_adjclose': rts['Adj Close'], 'b_adjclose': rbts['Adj Close']}"], {'index': 'rts.index'}), "({'s_adjclose': rts['Adj Close'], 'b_adjclose': rbts[\n 'Adj Close']}, index=rts.index)\n", (6082, 6171), True, 'import pandas as pd\n'), ((6433, 6477), 'numpy.cov', 'np.cov', (["dfsm['s_returns']", "dfsm['b_returns']"], {}), "(dfsm['s_returns'], dfsm['b_returns'])\n", (6439, 6477), True, 'import 
numpy as np\n'), ((6627, 6649), 'talib.RSI', 'talib.RSI', (["df['Close']"], {}), "(df['Close'])\n", (6636, 6649), False, 'import talib\n'), ((4873, 4892), 'selenium.webdriver.refresh', 'webdriver.refresh', ([], {}), '()\n', (4890, 4892), False, 'from selenium import webdriver\n'), ((5144, 5210), 'selenium.webdriver.find_element_by_class_name', 'webdriver.find_element_by_class_name', (['"""speedometerSignal-pyzN--tL"""'], {}), "('speedometerSignal-pyzN--tL')\n", (5180, 5210), False, 'from selenium import webdriver\n'), ((5352, 5415), 'selenium.webdriver.find_elements_by_class_name', 'webdriver.find_elements_by_class_name', (['"""counterNumber-3l14ys0C"""'], {}), "('counterNumber-3l14ys0C')\n", (5389, 5415), False, 'from selenium import webdriver\n'), ((4115, 4145), 'pandas.to_datetime', 'pd.to_datetime', (['dataframe.date'], {}), '(dataframe.date)\n', (4129, 4145), True, 'import pandas as pd\n'), ((5059, 5069), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (5064, 5069), False, 'from time import sleep\n'), ((4968, 5035), 'selenium.webdriver.find_elements_by_class_name', 'webdriver.find_elements_by_class_name', (['"""speedometerSignal-pyzN--tL"""'], {}), "('speedometerSignal-pyzN--tL')\n", (5005, 5035), False, 'from selenium import webdriver\n')]
|
"""
Quick and dirty script to compute shape tensor of a FITS image.
:author: <NAME>
:contact: <EMAIL>
:version: 0.1
"""
import numpy as np
import pyfits as pf
import glob as g
def computeShapeTensor(data):
"""
Computes a shape tensor from 2D imaging array.
:Warning: This function has been adapted from Fortran
              and is therefore very slow because of the nested
              loops; a vectorized rewrite is sketched below.
:param data: imaging data as a numpy array
:type data: ndarray
:return: half of the size of the object in x and y direction
:rtype: dict
"""
data = data.transpose()
xdim, ydim = data.shape
Qxx = 0.
Qxy = 0.
Qyy = 0.
for i in range(xdim):
for j in range(ydim):
Qxx += data[j, i] * (i - 0.5 * (xdim - 1)) * (i - 0.5 * (xdim - 1))
Qxy += data[j, i] * (i - 0.5 * (xdim - 1)) * (j - 0.5 * (ydim - 1))
Qyy += data[j, i] * (j - 0.5 * (ydim - 1)) * (j - 0.5 * (ydim - 1))
shx = (Qxx + Qyy + np.sqrt((Qxx - Qyy)**2 + 4. * Qxy * Qxy)) / 2.
shy = (Qxx + Qyy - np.sqrt((Qxx - Qyy)**2 + 4. * Qxy * Qxy)) / 2.
shapex = np.sqrt(shx / np.sum(data))
shapey = np.sqrt(shy / np.sum(data))
return dict(shapex=shapex, shapey=shapey)
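# --- Hypothetical vectorized rewrite (not part of the original script) ---
# The docstring above notes that computeShapeTensor is slow because of the nested
# Python loops; this sketch computes the same sums with numpy broadcasting. Like
# the loop version, it implicitly assumes a square postage stamp (xdim == ydim).
def computeShapeTensorVectorized(data):
    data = data.transpose()
    xdim, ydim = data.shape
    # centred pixel coordinates: x weights the second (i) index, y the first (j) index
    x = np.arange(xdim) - 0.5 * (xdim - 1)
    y = np.arange(ydim) - 0.5 * (ydim - 1)
    Qxx = np.sum(data * x[None, :] ** 2)
    Qyy = np.sum(data * y[:, None] ** 2)
    Qxy = np.sum(data * y[:, None] * x[None, :])
    disc = np.sqrt((Qxx - Qyy) ** 2 + 4. * Qxy * Qxy)
    shx = (Qxx + Qyy + disc) / 2.
    shy = (Qxx + Qyy - disc) / 2.
    return dict(shapex=np.sqrt(shx / np.sum(data)), shapey=np.sqrt(shy / np.sum(data)))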
if __name__ == '__main__':
files = g.glob('*.fits')
for file in files:
print ('File = %s' % file)
data = pf.getdata(file)
shape = computeShapeTensor(data)
print (shape)
|
[
"numpy.sqrt",
"numpy.sum",
"pyfits.getdata",
"glob.glob"
] |
[((1296, 1312), 'glob.glob', 'g.glob', (['"""*.fits"""'], {}), "('*.fits')\n", (1302, 1312), True, 'import glob as g\n'), ((1387, 1403), 'pyfits.getdata', 'pf.getdata', (['file'], {}), '(file)\n', (1397, 1403), True, 'import pyfits as pf\n'), ((1008, 1051), 'numpy.sqrt', 'np.sqrt', (['((Qxx - Qyy) ** 2 + 4.0 * Qxy * Qxy)'], {}), '((Qxx - Qyy) ** 2 + 4.0 * Qxy * Qxy)\n', (1015, 1051), True, 'import numpy as np\n'), ((1078, 1121), 'numpy.sqrt', 'np.sqrt', (['((Qxx - Qyy) ** 2 + 4.0 * Qxy * Qxy)'], {}), '((Qxx - Qyy) ** 2 + 4.0 * Qxy * Qxy)\n', (1085, 1121), True, 'import numpy as np\n'), ((1153, 1165), 'numpy.sum', 'np.sum', (['data'], {}), '(data)\n', (1159, 1165), True, 'import numpy as np\n'), ((1194, 1206), 'numpy.sum', 'np.sum', (['data'], {}), '(data)\n', (1200, 1206), True, 'import numpy as np\n')]
|
## @ingroup Methods-Aerodynamics-Common-Fidelity_Zero-Lift
# generate_propeller_wake_distribution.py
#
# Created: Sep 2020, <NAME>
# Modified: May 2021, <NAME>
# Jul 2021, <NAME>
# Jul 2021, <NAME>
# Sep 2021, <NAME>
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
# package imports
import numpy as np
from SUAVE.Core import Data
from SUAVE.Methods.Aerodynamics.Common.Fidelity_Zero.Lift.compute_wake_contraction_matrix import compute_wake_contraction_matrix
from SUAVE.Methods.Geometry.Two_Dimensional.Cross_Section.Airfoil.import_airfoil_geometry import import_airfoil_geometry
## @ingroup Methods-Aerodynamics-Common-Fidelity_Zero-Lift
def generate_propeller_wake_distribution(props,identical,m,VD,init_timestep_offset, time, number_of_wake_timesteps,conditions ):
""" This generates the propeller wake control points used to compute the
influence of the wake
Assumptions:
None
Source:
None
Inputs:
identical - if all props are identical [Bool]
m - control point [Unitless]
VD - vortex distribution
prop - propeller/rotor data structure
       init_timestep_offset     - initial time step                           [Unitless]
time - time [s]
number_of_wake_timesteps - number of wake timesteps [Unitless]
conditions.
noise.sources.propellers - propeller noise sources data structure
Properties Used:
N/A
"""
num_prop = len(props)
if identical:
# All props are identical in geometry, so only the first one is unpacked
prop_keys = list(props.keys())
prop_key = prop_keys[0]
prop = props[prop_key]
prop_outputs = conditions.noise.sources.propellers[prop_key]
Bmax = int(prop.number_of_blades)
nmax = int(prop_outputs.number_radial_stations - 1)
else:
# Props are unique, must find required matrix sizes to fit all vortex distributions
prop_keys = list(props.keys())
B_list = np.ones(len(prop_keys))
Nr_list = np.ones(len(prop_keys))
for i in range(len(prop_keys)):
p_key = list(props.keys())[i]
p = props[p_key]
p_out = conditions.noise.sources.propellers[p_key]
B_list[i] = p.number_of_blades
Nr_list[i] = p_out.number_radial_stations
# Identify max indices for pre-allocating vortex wake distribution matrices
Bmax = int(max(B_list))
nmax = int(max(Nr_list)-1)
    # Number of wake time steps (the rotor lifting-line panel is prepended to the wake points further below)
nts = number_of_wake_timesteps
# Initialize empty arrays with required sizes
VD, WD, Wmid = initialize_distributions(nmax, Bmax, nts, num_prop, m,VD)
# for each propeller, unpack and compute
for i, propi in enumerate(props):
propi_key = list(props.keys())[i]
if identical:
propi_outputs = prop_outputs
else:
propi_outputs = conditions.noise.sources.propellers[propi_key]
# Unpack
R = propi.tip_radius
r = propi.radius_distribution
c = propi.chord_distribution
MCA = propi.mid_chord_alignment
B = propi.number_of_blades
Na = propi_outputs.number_azimuthal_stations
Nr = propi_outputs.number_radial_stations
omega = propi_outputs.omega
va = propi_outputs.disc_axial_induced_velocity
V_inf = propi_outputs.velocity
gamma = propi_outputs.disc_circulation
blade_angles = np.linspace(0,2*np.pi,B+1)[:-1]
dt = time/number_of_wake_timesteps
ts = np.linspace(0,time,number_of_wake_timesteps)
t0 = dt*init_timestep_offset
start_angle = omega[0]*t0
propi.start_angle = start_angle[0]
# compute lambda and mu
mean_induced_velocity = np.mean( np.mean(va,axis = 1),axis = 1)
alpha = propi.orientation_euler_angles[1]
rots = np.array([[np.cos(alpha), 0, np.sin(alpha)], [0,1,0], [-np.sin(alpha), 0, np.cos(alpha)]])
lambda_tot = np.atleast_2d((np.dot(V_inf,rots[0]) + mean_induced_velocity)).T /(omega*R) # inflow advance ratio (page 99 Leishman)
mu_prop = np.atleast_2d(np.dot(V_inf,rots[2])).T /(omega*R) # rotor advance ratio (page 99 Leishman)
V_prop = np.atleast_2d(np.sqrt((V_inf[:,0] + mean_induced_velocity)**2 + (V_inf[:,2])**2)).T
# wake skew angle
wake_skew_angle = -np.arctan(mu_prop/lambda_tot)
# reshape gamma to find the average between stations
gamma_new = np.zeros((m,(Nr-1),Na)) # [control points, Nr-1, Na ] one less radial station because ring
gamma_new = (gamma[:,:-1,:] + gamma[:,1:,:])*0.5
num = int(Na/B)
time_idx = np.arange(nts)
t_idx = np.atleast_2d(time_idx).T
B_idx = np.arange(B)
B_loc = (B_idx*num + t_idx)%Na
Gamma = gamma_new[:,:,B_loc]
Gamma = Gamma.transpose(0,3,1,2)
# --------------------------------------------------------------------------------------------------------------
# ( control point , blade number , radial location on blade , time step )
# --------------------------------------------------------------------------------------------------------------
sx_inf0 = np.multiply(V_prop*np.cos(wake_skew_angle), np.atleast_2d(ts))
sx_inf = np.repeat(np.repeat(sx_inf0[:, None, :], B, axis = 1)[:, :, None, :], Nr, axis = 2)
sy_inf0 = np.multiply(np.atleast_2d(V_inf[:,1]).T,np.atleast_2d(ts)) # = zero since no crosswind
sy_inf = np.repeat(np.repeat(sy_inf0[:, None, :], B, axis = 1)[:, :, None, :], Nr, axis = 2)
sz_inf0 = np.multiply(V_prop*np.sin(wake_skew_angle),np.atleast_2d(ts))
sz_inf = np.repeat(np.repeat(sz_inf0[:, None, :], B, axis = 1)[:, :, None, :], Nr, axis = 2)
angle_offset = np.repeat(np.repeat(np.multiply(omega,np.atleast_2d(ts))[:, None, :],B, axis = 1)[:, :, None, :],Nr, axis = 2)
blade_angle_loc = np.repeat(np.repeat(np.tile(np.atleast_2d(blade_angles),(m,1))[:, :, None ], Nr, axis = 2)[:, :, :, None],number_of_wake_timesteps, axis = 3)
start_angle_offset = np.repeat(np.repeat(np.atleast_2d(start_angle)[:, None, :],B, axis = 1)[:, :, None, :],Nr, axis = 2)
total_angle_offset = angle_offset - start_angle_offset
        if (propi.rotation is not None) and (propi.rotation == 1):
total_angle_offset = -total_angle_offset
azi_y = np.sin(blade_angle_loc + total_angle_offset)
azi_z = np.cos(blade_angle_loc + total_angle_offset)
# extract airfoil trailing edge coordinates for initial location of vortex wake
a_sec = propi.airfoil_geometry
a_secl = propi.airfoil_polar_stations
airfoil_data = import_airfoil_geometry(a_sec,npoints=100)
# trailing edge points in airfoil coordinates
xupper = np.take(airfoil_data.x_upper_surface,a_secl,axis=0)
yupper = np.take(airfoil_data.y_upper_surface,a_secl,axis=0)
# Align the quarter chords of the airfoils (zero sweep)
airfoil_le_offset = (c[0]/2 - c/2 )
xte_airfoils = xupper[:,-1]*c + airfoil_le_offset
yte_airfoils = yupper[:,-1]*c
xle_airfoils = xupper[:,0]*c + airfoil_le_offset
yle_airfoils = yupper[:,0]*c
x_c_4_airfoils = (xle_airfoils - xte_airfoils)/4 - airfoil_le_offset
y_c_4_airfoils = (yle_airfoils - yte_airfoils)/4
x_cp_airfoils = 1*(xle_airfoils - xte_airfoils)/2 - airfoil_le_offset
y_cp_airfoils = 1*(yle_airfoils - yte_airfoils)/2
# apply blade twist rotation along rotor radius
beta = propi.twist_distribution
xte_twisted = np.cos(beta)*xte_airfoils - np.sin(beta)*yte_airfoils
yte_twisted = np.sin(beta)*xte_airfoils + np.cos(beta)*yte_airfoils
x_c_4_twisted = np.cos(beta)*x_c_4_airfoils - np.sin(beta)*y_c_4_airfoils
y_c_4_twisted = np.sin(beta)*x_c_4_airfoils + np.cos(beta)*y_c_4_airfoils
x_cp_twisted = np.cos(beta)*x_cp_airfoils - np.sin(beta)*y_cp_airfoils
y_cp_twisted = np.sin(beta)*x_cp_airfoils + np.cos(beta)*y_cp_airfoils
# transform coordinates from airfoil frame to rotor frame
xte = np.tile(np.atleast_2d(yte_twisted), (B,1))
xte_rotor = np.tile(xte[None,:,:,None], (m,1,1,number_of_wake_timesteps))
yte_rotor = -np.tile(xte_twisted[None,None,:,None],(m,B,1,1))*np.cos(blade_angle_loc+total_angle_offset)
zte_rotor = np.tile(xte_twisted[None,None,:,None],(m,B,1,1))*np.sin(blade_angle_loc+total_angle_offset)
r_4d = np.tile(r[None,None,:,None], (m,B,1,number_of_wake_timesteps))
x0 = 0
y0 = r_4d*azi_y
z0 = r_4d*azi_z
x_pts0 = x0 + xte_rotor
y_pts0 = y0 + yte_rotor
z_pts0 = z0 + zte_rotor
x_c_4_rotor = x0 - np.tile(y_c_4_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))
y_c_4_rotor = y0 + np.tile(x_c_4_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))*np.cos(blade_angle_loc+total_angle_offset)
z_c_4_rotor = z0 - np.tile(x_c_4_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))*np.sin(blade_angle_loc+total_angle_offset)
x_cp_rotor = x0 - np.tile(y_cp_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))
y_cp_rotor = y0 + np.tile(x_cp_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))*np.cos(blade_angle_loc+total_angle_offset)
z_cp_rotor = z0 - np.tile(x_cp_twisted[None,None,:,None], (m,B,1,number_of_wake_timesteps))*np.sin(blade_angle_loc+total_angle_offset)
# compute wake contraction, apply to y-z plane
X_pts0 = x_pts0 + sx_inf
wake_contraction = compute_wake_contraction_matrix(i,propi,Nr,m,number_of_wake_timesteps,X_pts0,propi_outputs)
Y_pts0 = y_pts0*wake_contraction + sy_inf
Z_pts0 = z_pts0*wake_contraction + sz_inf
# Rotate wake by thrust angle
rot_to_body = propi.prop_vel_to_body() # rotate points into the body frame: [Z,Y,X]' = R*[Z,Y,X]
# append propeller wake to each of its repeated origins
X_pts = propi.origin[0][0] + X_pts0*rot_to_body[2,2] + Z_pts0*rot_to_body[2,0]
Y_pts = propi.origin[0][1] + Y_pts0*rot_to_body[1,1]
Z_pts = propi.origin[0][2] + Z_pts0*rot_to_body[0,0] + X_pts0*rot_to_body[0,2]
#------------------------------------------------------
# Account for lifting line panels
#------------------------------------------------------
rots = np.array([[np.cos(alpha), 0, np.sin(alpha)], [0,1,0], [-np.sin(alpha), 0, np.cos(alpha)]])
# rotate rotor points to incidence angle
x_c_4 = x_c_4_rotor*rots[0,0] + y_c_4_rotor*rots[0,1] + z_c_4_rotor*rots[0,2]
y_c_4 = x_c_4_rotor*rots[1,0] + y_c_4_rotor*rots[1,1] + z_c_4_rotor*rots[1,2]
z_c_4 = x_c_4_rotor*rots[2,0] + y_c_4_rotor*rots[2,1] + z_c_4_rotor*rots[2,2]
# prepend points at quarter chord to account for rotor lifting line
X_pts = np.append(x_c_4[:,:,:,0][:,:,:,None], X_pts, axis=3)
Y_pts = np.append(y_c_4[:,:,:,0][:,:,:,None], Y_pts, axis=3)
Z_pts = np.append(z_c_4[:,:,:,0][:,:,:,None], Z_pts, axis=3)
#------------------------------------------------------
# Store points
#------------------------------------------------------
# ( control point, prop , blade number , location on blade, time step )
        if (propi.rotation is not None) and (propi.rotation == -1):
Wmid.WD_XA1[:,i,0:B,:,:] = X_pts[: , : , :-1 , :-1 ]
Wmid.WD_YA1[:,i,0:B,:,:] = Y_pts[: , : , :-1 , :-1 ]
Wmid.WD_ZA1[:,i,0:B,:,:] = Z_pts[: , : , :-1 , :-1 ]
Wmid.WD_XA2[:,i,0:B,:,:] = X_pts[: , : , :-1 , 1: ]
Wmid.WD_YA2[:,i,0:B,:,:] = Y_pts[: , : , :-1 , 1: ]
Wmid.WD_ZA2[:,i,0:B,:,:] = Z_pts[: , : , :-1 , 1: ]
Wmid.WD_XB1[:,i,0:B,:,:] = X_pts[: , : , 1: , :-1 ]
Wmid.WD_YB1[:,i,0:B,:,:] = Y_pts[: , : , 1: , :-1 ]
Wmid.WD_ZB1[:,i,0:B,:,:] = Z_pts[: , : , 1: , :-1 ]
Wmid.WD_XB2[:,i,0:B,:,:] = X_pts[: , : , 1: , 1: ]
Wmid.WD_YB2[:,i,0:B,:,:] = Y_pts[: , : , 1: , 1: ]
Wmid.WD_ZB2[:,i,0:B,:,:] = Z_pts[: , : , 1: , 1: ]
else:
Wmid.WD_XA1[:,i,0:B,:,:] = X_pts[: , : , 1: , :-1 ]
Wmid.WD_YA1[:,i,0:B,:,:] = Y_pts[: , : , 1: , :-1 ]
Wmid.WD_ZA1[:,i,0:B,:,:] = Z_pts[: , : , 1: , :-1 ]
Wmid.WD_XA2[:,i,0:B,:,:] = X_pts[: , : , 1: , 1: ]
Wmid.WD_YA2[:,i,0:B,:,:] = Y_pts[: , : , 1: , 1: ]
Wmid.WD_ZA2[:,i,0:B,:,:] = Z_pts[: , : , 1: , 1: ]
Wmid.WD_XB1[:,i,0:B,:,:] = X_pts[: , : , :-1 , :-1 ]
Wmid.WD_YB1[:,i,0:B,:,:] = Y_pts[: , : , :-1 , :-1 ]
Wmid.WD_ZB1[:,i,0:B,:,:] = Z_pts[: , : , :-1 , :-1 ]
Wmid.WD_XB2[:,i,0:B,:,:] = X_pts[: , : , :-1 , 1: ]
Wmid.WD_YB2[:,i,0:B,:,:] = Y_pts[: , : , :-1 , 1: ]
Wmid.WD_ZB2[:,i,0:B,:,:] = Z_pts[: , : , :-1 , 1: ]
Wmid.WD_GAMMA[:,i,0:B,:,:] = Gamma
# store points for plotting
VD.Wake.XA1[:,i,0:B,:,:] = X_pts[: , : , :-1 , :-1 ]
VD.Wake.YA1[:,i,0:B,:,:] = Y_pts[: , : , :-1 , :-1 ]
VD.Wake.ZA1[:,i,0:B,:,:] = Z_pts[: , : , :-1 , :-1 ]
VD.Wake.XA2[:,i,0:B,:,:] = X_pts[: , : , :-1 , 1: ]
VD.Wake.YA2[:,i,0:B,:,:] = Y_pts[: , : , :-1 , 1: ]
VD.Wake.ZA2[:,i,0:B,:,:] = Z_pts[: , : , :-1 , 1: ]
VD.Wake.XB1[:,i,0:B,:,:] = X_pts[: , : , 1: , :-1 ]
VD.Wake.YB1[:,i,0:B,:,:] = Y_pts[: , : , 1: , :-1 ]
VD.Wake.ZB1[:,i,0:B,:,:] = Z_pts[: , : , 1: , :-1 ]
VD.Wake.XB2[:,i,0:B,:,:] = X_pts[: , : , 1: , 1: ]
VD.Wake.YB2[:,i,0:B,:,:] = Y_pts[: , : , 1: , 1: ]
VD.Wake.ZB2[:,i,0:B,:,:] = Z_pts[: , : , 1: , 1: ]
# Append wake geometry and vortex strengths to each individual propeller
propi.Wake_VD.XA1 = VD.Wake.XA1[:,i,0:B,:,:]
propi.Wake_VD.YA1 = VD.Wake.YA1[:,i,0:B,:,:]
propi.Wake_VD.ZA1 = VD.Wake.ZA1[:,i,0:B,:,:]
propi.Wake_VD.XA2 = VD.Wake.XA2[:,i,0:B,:,:]
propi.Wake_VD.YA2 = VD.Wake.YA2[:,i,0:B,:,:]
propi.Wake_VD.ZA2 = VD.Wake.ZA2[:,i,0:B,:,:]
propi.Wake_VD.XB1 = VD.Wake.XB1[:,i,0:B,:,:]
propi.Wake_VD.YB1 = VD.Wake.YB1[:,i,0:B,:,:]
propi.Wake_VD.ZB1 = VD.Wake.ZB1[:,i,0:B,:,:]
propi.Wake_VD.XB2 = VD.Wake.XB2[:,i,0:B,:,:]
propi.Wake_VD.YB2 = VD.Wake.YB2[:,i,0:B,:,:]
propi.Wake_VD.ZB2 = VD.Wake.ZB2[:,i,0:B,:,:]
propi.Wake_VD.GAMMA = Wmid.WD_GAMMA[:,i,0:B,:,:]
# append trailing edge locations
propi.Wake_VD.Xblades_te = X_pts[0,:,:,0]
propi.Wake_VD.Yblades_te = Y_pts[0,:,:,0]
propi.Wake_VD.Zblades_te = Z_pts[0,:,:,0]
# append quarter chord lifting line point locations
propi.Wake_VD.Xblades_c_4 = x_c_4_rotor
propi.Wake_VD.Yblades_c_4 = y_c_4_rotor
propi.Wake_VD.Zblades_c_4 = z_c_4_rotor
# append three-quarter chord evaluation point locations
        propi.Wake_VD.Xblades_cp   = x_cp_rotor
        propi.Wake_VD.Yblades_cp   = y_cp_rotor
        propi.Wake_VD.Zblades_cp   = z_cp_rotor
propi.Wake_VD.Xblades_cp2 = X_pts[0,:,:,0] + (X_pts[0,:,:,0]-X_pts[0,:,:,1])/2
propi.Wake_VD.Yblades_cp2 = Y_pts[0,:,:,0] + (Y_pts[0,:,:,0]-Y_pts[0,:,:,1])/2
propi.Wake_VD.Zblades_cp2 = Z_pts[0,:,:,0] + (Z_pts[0,:,:,0]-Z_pts[0,:,:,1])/2
# Compress Data into 1D Arrays
mat6_size = (m,num_prop*nts*Bmax*nmax)
WD.XA1 = np.reshape(Wmid.WD_XA1,mat6_size)
WD.YA1 = np.reshape(Wmid.WD_YA1,mat6_size)
WD.ZA1 = np.reshape(Wmid.WD_ZA1,mat6_size)
WD.XA2 = np.reshape(Wmid.WD_XA2,mat6_size)
WD.YA2 = np.reshape(Wmid.WD_YA2,mat6_size)
WD.ZA2 = np.reshape(Wmid.WD_ZA2,mat6_size)
WD.XB1 = np.reshape(Wmid.WD_XB1,mat6_size)
WD.YB1 = np.reshape(Wmid.WD_YB1,mat6_size)
WD.ZB1 = np.reshape(Wmid.WD_ZB1,mat6_size)
WD.XB2 = np.reshape(Wmid.WD_XB2,mat6_size)
WD.YB2 = np.reshape(Wmid.WD_YB2,mat6_size)
WD.ZB2 = np.reshape(Wmid.WD_ZB2,mat6_size)
WD.GAMMA = np.reshape(Wmid.WD_GAMMA,mat6_size)
return WD, dt, ts, B, Nr
def initialize_distributions(nmax, Bmax, n_wts, n_props, m,VD):
Wmid = Data()
mat1_size = (m,n_props,Bmax,nmax, n_wts)
Wmid.WD_XA1 = np.zeros(mat1_size)
Wmid.WD_YA1 = np.zeros(mat1_size)
Wmid.WD_ZA1 = np.zeros(mat1_size)
Wmid.WD_XA2 = np.zeros(mat1_size)
Wmid.WD_YA2 = np.zeros(mat1_size)
Wmid.WD_ZA2 = np.zeros(mat1_size)
Wmid.WD_XB1 = np.zeros(mat1_size)
Wmid.WD_YB1 = np.zeros(mat1_size)
Wmid.WD_ZB1 = np.zeros(mat1_size)
Wmid.WD_XB2 = np.zeros(mat1_size)
Wmid.WD_YB2 = np.zeros(mat1_size)
Wmid.WD_ZB2 = np.zeros(mat1_size)
Wmid.WD_GAMMA = np.zeros(mat1_size)
WD = Data()
mat2_size = (m,n_props*n_wts*Bmax*nmax)
WD.XA1 = np.zeros(mat2_size)
WD.YA1 = np.zeros(mat2_size)
WD.ZA1 = np.zeros(mat2_size)
WD.XA2 = np.zeros(mat2_size)
WD.YA2 = np.zeros(mat2_size)
WD.ZA2 = np.zeros(mat2_size)
WD.XB1 = np.zeros(mat2_size)
WD.YB1 = np.zeros(mat2_size)
WD.ZB1 = np.zeros(mat2_size)
WD.XB2 = np.zeros(mat2_size)
WD.YB2 = np.zeros(mat2_size)
WD.ZB2 = np.zeros(mat2_size)
VD.Wake = Data()
mat3_size = (m,n_props,Bmax,nmax,n_wts)
VD.Wake.XA1 = np.zeros(mat3_size)
VD.Wake.YA1 = np.zeros(mat3_size)
VD.Wake.ZA1 = np.zeros(mat3_size)
VD.Wake.XA2 = np.zeros(mat3_size)
VD.Wake.YA2 = np.zeros(mat3_size)
VD.Wake.ZA2 = np.zeros(mat3_size)
VD.Wake.XB1 = np.zeros(mat3_size)
VD.Wake.YB1 = np.zeros(mat3_size)
VD.Wake.ZB1 = np.zeros(mat3_size)
VD.Wake.XB2 = np.zeros(mat3_size)
VD.Wake.YB2 = np.zeros(mat3_size)
VD.Wake.ZB2 = np.zeros(mat3_size)
return VD, WD, Wmid
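# --- Illustrative sketch (not part of SUAVE) ---
# generate_propeller_wake_distribution() above assigns each blade, at every wake
# time step, to an azimuthal station through a modular index,
#     azimuthal station = (blade_index * Na/B + time_step) % Na,
# which staggers the B blades evenly around the Na stations. The hypothetical
# helper below reproduces just that indexing with plain numpy, assuming Na is an
# integer multiple of B as in the routine above.
def _demo_blade_azimuth_indexing(Na=8, B=2, nts=5):
    num   = int(Na / B)                      # azimuthal spacing between adjacent blades
    t_idx = np.atleast_2d(np.arange(nts)).T   # (nts, 1) wake time-step index
    B_idx = np.arange(B)                      # (B,)    blade index
    return (B_idx * num + t_idx) % Na        # (nts, B) azimuthal station per blade and step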
|
[
"numpy.dot",
"SUAVE.Core.Data",
"numpy.zeros",
"SUAVE.Methods.Aerodynamics.Common.Fidelity_Zero.Lift.compute_wake_contraction_matrix.compute_wake_contraction_matrix",
"numpy.sqrt",
"numpy.append",
"numpy.sin",
"numpy.arange",
"numpy.reshape",
"numpy.linspace",
"numpy.cos",
"numpy.take",
"SUAVE.Methods.Geometry.Two_Dimensional.Cross_Section.Airfoil.import_airfoil_geometry.import_airfoil_geometry",
"numpy.tile",
"numpy.mean",
"numpy.arctan",
"numpy.repeat",
"numpy.atleast_2d"
] |
[((17107, 17141), 'numpy.reshape', 'np.reshape', (['Wmid.WD_XA1', 'mat6_size'], {}), '(Wmid.WD_XA1, mat6_size)\n', (17117, 17141), True, 'import numpy as np\n'), ((17158, 17192), 'numpy.reshape', 'np.reshape', (['Wmid.WD_YA1', 'mat6_size'], {}), '(Wmid.WD_YA1, mat6_size)\n', (17168, 17192), True, 'import numpy as np\n'), ((17209, 17243), 'numpy.reshape', 'np.reshape', (['Wmid.WD_ZA1', 'mat6_size'], {}), '(Wmid.WD_ZA1, mat6_size)\n', (17219, 17243), True, 'import numpy as np\n'), ((17260, 17294), 'numpy.reshape', 'np.reshape', (['Wmid.WD_XA2', 'mat6_size'], {}), '(Wmid.WD_XA2, mat6_size)\n', (17270, 17294), True, 'import numpy as np\n'), ((17311, 17345), 'numpy.reshape', 'np.reshape', (['Wmid.WD_YA2', 'mat6_size'], {}), '(Wmid.WD_YA2, mat6_size)\n', (17321, 17345), True, 'import numpy as np\n'), ((17362, 17396), 'numpy.reshape', 'np.reshape', (['Wmid.WD_ZA2', 'mat6_size'], {}), '(Wmid.WD_ZA2, mat6_size)\n', (17372, 17396), True, 'import numpy as np\n'), ((17413, 17447), 'numpy.reshape', 'np.reshape', (['Wmid.WD_XB1', 'mat6_size'], {}), '(Wmid.WD_XB1, mat6_size)\n', (17423, 17447), True, 'import numpy as np\n'), ((17464, 17498), 'numpy.reshape', 'np.reshape', (['Wmid.WD_YB1', 'mat6_size'], {}), '(Wmid.WD_YB1, mat6_size)\n', (17474, 17498), True, 'import numpy as np\n'), ((17515, 17549), 'numpy.reshape', 'np.reshape', (['Wmid.WD_ZB1', 'mat6_size'], {}), '(Wmid.WD_ZB1, mat6_size)\n', (17525, 17549), True, 'import numpy as np\n'), ((17566, 17600), 'numpy.reshape', 'np.reshape', (['Wmid.WD_XB2', 'mat6_size'], {}), '(Wmid.WD_XB2, mat6_size)\n', (17576, 17600), True, 'import numpy as np\n'), ((17617, 17651), 'numpy.reshape', 'np.reshape', (['Wmid.WD_YB2', 'mat6_size'], {}), '(Wmid.WD_YB2, mat6_size)\n', (17627, 17651), True, 'import numpy as np\n'), ((17668, 17702), 'numpy.reshape', 'np.reshape', (['Wmid.WD_ZB2', 'mat6_size'], {}), '(Wmid.WD_ZB2, mat6_size)\n', (17678, 17702), True, 'import numpy as np\n'), ((17719, 17755), 'numpy.reshape', 'np.reshape', (['Wmid.WD_GAMMA', 'mat6_size'], {}), '(Wmid.WD_GAMMA, mat6_size)\n', (17729, 17755), True, 'import numpy as np\n'), ((17875, 17881), 'SUAVE.Core.Data', 'Data', ([], {}), '()\n', (17879, 17881), False, 'from SUAVE.Core import Data\n'), ((17948, 17967), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (17956, 17967), True, 'import numpy as np\n'), ((17991, 18010), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (17999, 18010), True, 'import numpy as np\n'), ((18034, 18053), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18042, 18053), True, 'import numpy as np\n'), ((18077, 18096), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18085, 18096), True, 'import numpy as np\n'), ((18120, 18139), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18128, 18139), True, 'import numpy as np\n'), ((18163, 18182), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18171, 18182), True, 'import numpy as np\n'), ((18210, 18229), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18218, 18229), True, 'import numpy as np\n'), ((18253, 18272), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18261, 18272), True, 'import numpy as np\n'), ((18296, 18315), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18304, 18315), True, 'import numpy as np\n'), ((18339, 18358), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18347, 18358), True, 'import numpy as np\n'), ((18383, 18402), 'numpy.zeros', 'np.zeros', 
(['mat1_size'], {}), '(mat1_size)\n', (18391, 18402), True, 'import numpy as np\n'), ((18427, 18446), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18435, 18446), True, 'import numpy as np\n'), ((18473, 18492), 'numpy.zeros', 'np.zeros', (['mat1_size'], {}), '(mat1_size)\n', (18481, 18492), True, 'import numpy as np\n'), ((18515, 18521), 'SUAVE.Core.Data', 'Data', ([], {}), '()\n', (18519, 18521), False, 'from SUAVE.Core import Data\n'), ((18582, 18601), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18590, 18601), True, 'import numpy as np\n'), ((18618, 18637), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18626, 18637), True, 'import numpy as np\n'), ((18654, 18673), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18662, 18673), True, 'import numpy as np\n'), ((18690, 18709), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18698, 18709), True, 'import numpy as np\n'), ((18726, 18745), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18734, 18745), True, 'import numpy as np\n'), ((18762, 18781), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18770, 18781), True, 'import numpy as np\n'), ((18801, 18820), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18809, 18820), True, 'import numpy as np\n'), ((18837, 18856), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18845, 18856), True, 'import numpy as np\n'), ((18873, 18892), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18881, 18892), True, 'import numpy as np\n'), ((18909, 18928), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18917, 18928), True, 'import numpy as np\n'), ((18945, 18964), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18953, 18964), True, 'import numpy as np\n'), ((18981, 19000), 'numpy.zeros', 'np.zeros', (['mat2_size'], {}), '(mat2_size)\n', (18989, 19000), True, 'import numpy as np\n'), ((19023, 19029), 'SUAVE.Core.Data', 'Data', ([], {}), '()\n', (19027, 19029), False, 'from SUAVE.Core import Data\n'), ((19098, 19117), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19106, 19117), True, 'import numpy as np\n'), ((19139, 19158), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19147, 19158), True, 'import numpy as np\n'), ((19180, 19199), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19188, 19199), True, 'import numpy as np\n'), ((19221, 19240), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19229, 19240), True, 'import numpy as np\n'), ((19262, 19281), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19270, 19281), True, 'import numpy as np\n'), ((19303, 19322), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19311, 19322), True, 'import numpy as np\n'), ((19347, 19366), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19355, 19366), True, 'import numpy as np\n'), ((19388, 19407), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19396, 19407), True, 'import numpy as np\n'), ((19429, 19448), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19437, 19448), True, 'import numpy as np\n'), ((19470, 19489), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19478, 19489), True, 'import numpy as np\n'), ((19511, 19530), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19519, 19530), True, 'import numpy as np\n'), 
((19552, 19571), 'numpy.zeros', 'np.zeros', (['mat3_size'], {}), '(mat3_size)\n', (19560, 19571), True, 'import numpy as np\n'), ((4270, 4316), 'numpy.linspace', 'np.linspace', (['(0)', 'time', 'number_of_wake_timesteps'], {}), '(0, time, number_of_wake_timesteps)\n', (4281, 4316), True, 'import numpy as np\n'), ((5318, 5343), 'numpy.zeros', 'np.zeros', (['(m, Nr - 1, Na)'], {}), '((m, Nr - 1, Na))\n', (5326, 5343), True, 'import numpy as np\n'), ((5544, 5558), 'numpy.arange', 'np.arange', (['nts'], {}), '(nts)\n', (5553, 5558), True, 'import numpy as np\n'), ((5626, 5638), 'numpy.arange', 'np.arange', (['B'], {}), '(B)\n', (5635, 5638), True, 'import numpy as np\n'), ((7498, 7542), 'numpy.sin', 'np.sin', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (7504, 7542), True, 'import numpy as np\n'), ((7563, 7607), 'numpy.cos', 'np.cos', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (7569, 7607), True, 'import numpy as np\n'), ((7830, 7873), 'SUAVE.Methods.Geometry.Two_Dimensional.Cross_Section.Airfoil.import_airfoil_geometry.import_airfoil_geometry', 'import_airfoil_geometry', (['a_sec'], {'npoints': '(100)'}), '(a_sec, npoints=100)\n', (7853, 7873), False, 'from SUAVE.Methods.Geometry.Two_Dimensional.Cross_Section.Airfoil.import_airfoil_geometry import import_airfoil_geometry\n'), ((7962, 8015), 'numpy.take', 'np.take', (['airfoil_data.x_upper_surface', 'a_secl'], {'axis': '(0)'}), '(airfoil_data.x_upper_surface, a_secl, axis=0)\n', (7969, 8015), True, 'import numpy as np\n'), ((8042, 8095), 'numpy.take', 'np.take', (['airfoil_data.y_upper_surface', 'a_secl'], {'axis': '(0)'}), '(airfoil_data.y_upper_surface, a_secl, axis=0)\n', (8049, 8095), True, 'import numpy as np\n'), ((9471, 9538), 'numpy.tile', 'np.tile', (['xte[None, :, :, None]', '(m, 1, 1, number_of_wake_timesteps)'], {}), '(xte[None, :, :, None], (m, 1, 1, number_of_wake_timesteps))\n', (9478, 9538), True, 'import numpy as np\n'), ((9783, 9851), 'numpy.tile', 'np.tile', (['r[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(r[None, None, :, None], (m, B, 1, number_of_wake_timesteps))\n', (9790, 9851), True, 'import numpy as np\n'), ((10953, 11054), 'SUAVE.Methods.Aerodynamics.Common.Fidelity_Zero.Lift.compute_wake_contraction_matrix.compute_wake_contraction_matrix', 'compute_wake_contraction_matrix', (['i', 'propi', 'Nr', 'm', 'number_of_wake_timesteps', 'X_pts0', 'propi_outputs'], {}), '(i, propi, Nr, m, number_of_wake_timesteps,\n X_pts0, propi_outputs)\n', (10984, 11054), False, 'from SUAVE.Methods.Aerodynamics.Common.Fidelity_Zero.Lift.compute_wake_contraction_matrix import compute_wake_contraction_matrix\n'), ((12375, 12433), 'numpy.append', 'np.append', (['x_c_4[:, :, :, 0][:, :, :, None]', 'X_pts'], {'axis': '(3)'}), '(x_c_4[:, :, :, 0][:, :, :, None], X_pts, axis=3)\n', (12384, 12433), True, 'import numpy as np\n'), ((12444, 12502), 'numpy.append', 'np.append', (['y_c_4[:, :, :, 0][:, :, :, None]', 'Y_pts'], {'axis': '(3)'}), '(y_c_4[:, :, :, 0][:, :, :, None], Y_pts, axis=3)\n', (12453, 12502), True, 'import numpy as np\n'), ((12513, 12571), 'numpy.append', 'np.append', (['z_c_4[:, :, :, 0][:, :, :, None]', 'Z_pts'], {'axis': '(3)'}), '(z_c_4[:, :, :, 0][:, :, :, None], Z_pts, axis=3)\n', (12522, 12571), True, 'import numpy as np\n'), ((4151, 4183), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(B + 1)'], {}), '(0, 2 * np.pi, B + 1)\n', (4162, 4183), True, 'import numpy as np\n'), ((4537, 4556), 
'numpy.mean', 'np.mean', (['va'], {'axis': '(1)'}), '(va, axis=1)\n', (4544, 4556), True, 'import numpy as np\n'), ((5197, 5228), 'numpy.arctan', 'np.arctan', (['(mu_prop / lambda_tot)'], {}), '(mu_prop / lambda_tot)\n', (5206, 5228), True, 'import numpy as np\n'), ((5579, 5602), 'numpy.atleast_2d', 'np.atleast_2d', (['time_idx'], {}), '(time_idx)\n', (5592, 5602), True, 'import numpy as np\n'), ((6191, 6208), 'numpy.atleast_2d', 'np.atleast_2d', (['ts'], {}), '(ts)\n', (6204, 6208), True, 'import numpy as np\n'), ((6420, 6437), 'numpy.atleast_2d', 'np.atleast_2d', (['ts'], {}), '(ts)\n', (6433, 6437), True, 'import numpy as np\n'), ((6682, 6699), 'numpy.atleast_2d', 'np.atleast_2d', (['ts'], {}), '(ts)\n', (6695, 6699), True, 'import numpy as np\n'), ((9416, 9442), 'numpy.atleast_2d', 'np.atleast_2d', (['yte_twisted'], {}), '(yte_twisted)\n', (9429, 9442), True, 'import numpy as np\n'), ((9603, 9647), 'numpy.cos', 'np.cos', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (9609, 9647), True, 'import numpy as np\n'), ((9667, 9722), 'numpy.tile', 'np.tile', (['xte_twisted[None, None, :, None]', '(m, B, 1, 1)'], {}), '(xte_twisted[None, None, :, None], (m, B, 1, 1))\n', (9674, 9722), True, 'import numpy as np\n'), ((9716, 9760), 'numpy.sin', 'np.sin', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (9722, 9760), True, 'import numpy as np\n'), ((10059, 10144), 'numpy.tile', 'np.tile', (['y_c_4_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(y_c_4_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps)\n )\n', (10066, 10144), True, 'import numpy as np\n'), ((10454, 10533), 'numpy.tile', 'np.tile', (['y_cp_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(y_cp_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps))\n', (10461, 10533), True, 'import numpy as np\n'), ((5071, 5141), 'numpy.sqrt', 'np.sqrt', (['((V_inf[:, 0] + mean_induced_velocity) ** 2 + V_inf[:, 2] ** 2)'], {}), '((V_inf[:, 0] + mean_induced_velocity) ** 2 + V_inf[:, 2] ** 2)\n', (5078, 5141), True, 'import numpy as np\n'), ((6166, 6189), 'numpy.cos', 'np.cos', (['wake_skew_angle'], {}), '(wake_skew_angle)\n', (6172, 6189), True, 'import numpy as np\n'), ((6249, 6290), 'numpy.repeat', 'np.repeat', (['sx_inf0[:, None, :]', 'B'], {'axis': '(1)'}), '(sx_inf0[:, None, :], B, axis=1)\n', (6258, 6290), True, 'import numpy as np\n'), ((6392, 6418), 'numpy.atleast_2d', 'np.atleast_2d', (['V_inf[:, 1]'], {}), '(V_inf[:, 1])\n', (6405, 6418), True, 'import numpy as np\n'), ((6506, 6547), 'numpy.repeat', 'np.repeat', (['sy_inf0[:, None, :]', 'B'], {'axis': '(1)'}), '(sy_inf0[:, None, :], B, axis=1)\n', (6515, 6547), True, 'import numpy as np\n'), ((6658, 6681), 'numpy.sin', 'np.sin', (['wake_skew_angle'], {}), '(wake_skew_angle)\n', (6664, 6681), True, 'import numpy as np\n'), ((6740, 6781), 'numpy.repeat', 'np.repeat', (['sz_inf0[:, None, :]', 'B'], {'axis': '(1)'}), '(sz_inf0[:, None, :], B, axis=1)\n', (6749, 6781), True, 'import numpy as np\n'), ((8831, 8843), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (8837, 8843), True, 'import numpy as np\n'), ((8859, 8871), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (8865, 8871), True, 'import numpy as np\n'), ((8915, 8927), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (8921, 8927), True, 'import numpy as np\n'), ((8943, 8955), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (8949, 8955), True, 'import 
numpy as np\n'), ((9006, 9018), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (9012, 9018), True, 'import numpy as np\n'), ((9036, 9048), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (9042, 9048), True, 'import numpy as np\n'), ((9089, 9101), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (9095, 9101), True, 'import numpy as np\n'), ((9119, 9131), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (9125, 9131), True, 'import numpy as np\n'), ((9181, 9193), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (9187, 9193), True, 'import numpy as np\n'), ((9210, 9222), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (9216, 9222), True, 'import numpy as np\n'), ((9261, 9273), 'numpy.sin', 'np.sin', (['beta'], {}), '(beta)\n', (9267, 9273), True, 'import numpy as np\n'), ((9290, 9302), 'numpy.cos', 'np.cos', (['beta'], {}), '(beta)\n', (9296, 9302), True, 'import numpy as np\n'), ((9554, 9609), 'numpy.tile', 'np.tile', (['xte_twisted[None, None, :, None]', '(m, B, 1, 1)'], {}), '(xte_twisted[None, None, :, None], (m, B, 1, 1))\n', (9561, 9609), True, 'import numpy as np\n'), ((10161, 10246), 'numpy.tile', 'np.tile', (['x_c_4_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(x_c_4_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps)\n )\n', (10168, 10246), True, 'import numpy as np\n'), ((10236, 10280), 'numpy.cos', 'np.cos', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (10242, 10280), True, 'import numpy as np\n'), ((10306, 10391), 'numpy.tile', 'np.tile', (['x_c_4_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(x_c_4_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps)\n )\n', (10313, 10391), True, 'import numpy as np\n'), ((10381, 10425), 'numpy.sin', 'np.sin', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (10387, 10425), True, 'import numpy as np\n'), ((10556, 10635), 'numpy.tile', 'np.tile', (['x_cp_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(x_cp_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps))\n', (10563, 10635), True, 'import numpy as np\n'), ((10631, 10675), 'numpy.cos', 'np.cos', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (10637, 10675), True, 'import numpy as np\n'), ((10701, 10780), 'numpy.tile', 'np.tile', (['x_cp_twisted[None, None, :, None]', '(m, B, 1, number_of_wake_timesteps)'], {}), '(x_cp_twisted[None, None, :, None], (m, B, 1, number_of_wake_timesteps))\n', (10708, 10780), True, 'import numpy as np\n'), ((10776, 10820), 'numpy.sin', 'np.sin', (['(blade_angle_loc + total_angle_offset)'], {}), '(blade_angle_loc + total_angle_offset)\n', (10782, 10820), True, 'import numpy as np\n'), ((4653, 4666), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (4659, 4666), True, 'import numpy as np\n'), ((4671, 4684), 'numpy.sin', 'np.sin', (['alpha'], {}), '(alpha)\n', (4677, 4684), True, 'import numpy as np\n'), ((4716, 4729), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (4722, 4729), True, 'import numpy as np\n'), ((4925, 4947), 'numpy.dot', 'np.dot', (['V_inf', 'rots[2]'], {}), '(V_inf, rots[2])\n', (4931, 4947), True, 'import numpy as np\n'), ((11866, 11879), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (11872, 11879), True, 'import numpy as np\n'), ((11884, 11897), 'numpy.sin', 'np.sin', (['alpha'], {}), '(alpha)\n', (11890, 11897), True, 'import numpy as np\n'), 
((11929, 11942), 'numpy.cos', 'np.cos', (['alpha'], {}), '(alpha)\n', (11935, 11942), True, 'import numpy as np\n'), ((4698, 4711), 'numpy.sin', 'np.sin', (['alpha'], {}), '(alpha)\n', (4704, 4711), True, 'import numpy as np\n'), ((4781, 4803), 'numpy.dot', 'np.dot', (['V_inf', 'rots[0]'], {}), '(V_inf, rots[0])\n', (4787, 4803), True, 'import numpy as np\n'), ((7192, 7218), 'numpy.atleast_2d', 'np.atleast_2d', (['start_angle'], {}), '(start_angle)\n', (7205, 7218), True, 'import numpy as np\n'), ((11911, 11924), 'numpy.sin', 'np.sin', (['alpha'], {}), '(alpha)\n', (11917, 11924), True, 'import numpy as np\n'), ((6897, 6914), 'numpy.atleast_2d', 'np.atleast_2d', (['ts'], {}), '(ts)\n', (6910, 6914), True, 'import numpy as np\n'), ((7028, 7055), 'numpy.atleast_2d', 'np.atleast_2d', (['blade_angles'], {}), '(blade_angles)\n', (7041, 7055), True, 'import numpy as np\n')]
|
import logging
import numpy, random, time, json, copy
import numpy as np
import os.path as osp
import torch
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, Subset
from cords.utils.data.data_utils import WeightedSubset
from cords.utils.models import WideResNet, ShakeNet, CNN13, CNN
from cords.utils.data.datasets.SSL import utils as dataset_utils
from cords.selectionstrategies.helpers.ssl_lib.algs.builder import gen_ssl_alg
from cords.selectionstrategies.helpers.ssl_lib.algs import utils as alg_utils
from cords.utils.models import utils as model_utils
from cords.selectionstrategies.helpers.ssl_lib.consistency.builder import gen_consistency
from cords.utils.data.datasets.SSL import gen_dataset
from cords.selectionstrategies.helpers.ssl_lib.param_scheduler import scheduler
from cords.selectionstrategies.helpers.ssl_lib.misc.meter import Meter
from cords.utils.data.dataloader.SSL.adaptive import *
from cords.utils.config_utils import load_config_data
import time
import os
import sys
class TrainClassifier:
def __init__(self, config_file):
self.config_file = config_file
self.cfg = load_config_data(self.config_file)
results_dir = osp.abspath(osp.expanduser(self.cfg.train_args.results_dir))
all_logs_dir = os.path.join(results_dir, self.cfg.setting,
self.cfg.dss_args.type,
self.cfg.dataset.name,
str(self.cfg.dss_args.fraction),
str(self.cfg.dss_args.select_every))
os.makedirs(all_logs_dir, exist_ok=True)
# setup logger
plain_formatter = logging.Formatter("[%(asctime)s] %(name)s %(levelname)s: %(message)s",
datefmt="%m/%d %H:%M:%S")
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.INFO)
s_handler = logging.StreamHandler(stream=sys.stdout)
s_handler.setFormatter(plain_formatter)
s_handler.setLevel(logging.INFO)
self.logger.addHandler(s_handler)
f_handler = logging.FileHandler(os.path.join(all_logs_dir, self.cfg.dataset.name + ".log"))
f_handler.setFormatter(plain_formatter)
f_handler.setLevel(logging.DEBUG)
self.logger.addHandler(f_handler)
self.logger.propagate = False
self.logger.info(self.cfg)
"""
############################## Model Creation ##############################
"""
def gen_model(self, name, num_classes, img_size):
scale = int(np.ceil(np.log2(img_size)))
if name == "wrn":
return WideResNet(num_classes, 32, scale, 4)
elif name == "shake":
return ShakeNet(num_classes, 32, scale, 4)
elif name == "cnn13":
return CNN13(num_classes, 32)
elif name == 'cnn':
return CNN(num_classes)
else:
raise NotImplementedError
"""
############################## Model Evaluation ##############################
"""
@staticmethod
def evaluation(raw_model, eval_model, loader, device):
raw_model.eval()
eval_model.eval()
sum_raw_acc = sum_acc = sum_loss = 0
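        # Accumulate test loss and accuracy for both the raw student model and the (possibly weight-averaged) evaluation model.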
with torch.no_grad():
for (data, labels) in loader:
data, labels = data.to(device), labels.to(device)
preds = eval_model(data)
raw_preds = raw_model(data)
loss = F.cross_entropy(preds, labels)
sum_loss += loss.item()
acc = (preds.max(1)[1] == labels).float().mean()
raw_acc = (raw_preds.max(1)[1] == labels).float().mean()
sum_acc += acc.item()
sum_raw_acc += raw_acc.item()
mean_raw_acc = sum_raw_acc / len(loader)
mean_acc = sum_acc / len(loader)
mean_loss = sum_loss / len(loader)
raw_model.train()
eval_model.train()
return mean_raw_acc, mean_acc, mean_loss
"""
############################## Model Parameters Update ##############################
"""
def param_update(self,
cur_iteration,
model,
teacher_model,
optimizer,
ssl_alg,
consistency,
labeled_data,
ul_weak_data,
ul_strong_data,
labels,
average_model,
weights=None,
ood=False
):
# if ood:
# model.update_batch_stats(False)
start_time = time.time()
all_data = torch.cat([labeled_data, ul_weak_data, ul_strong_data], 0)
forward_func = model.forward
stu_logits = forward_func(all_data)
labeled_preds = stu_logits[:labeled_data.shape[0]]
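        # The remaining logits belong to the unlabeled batch: the first half are the weak-augmentation logits, the second half the strong-augmentation logits.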
stu_unlabeled_weak_logits, stu_unlabeled_strong_logits = torch.chunk(stu_logits[labels.shape[0]:], 2, dim=0)
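        # Supervised loss on the labeled batch, optionally annealed according to the configured TSA schedule.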
if self.cfg.optimizer.tsa:
none_reduced_loss = F.cross_entropy(labeled_preds, labels, reduction="none")
L_supervised = alg_utils.anneal_loss(
labeled_preds, labels, none_reduced_loss, cur_iteration + 1,
self.cfg.train_args.iteration, labeled_preds.shape[1], self.cfg.optimizer.tsa_schedule)
else:
L_supervised = F.cross_entropy(labeled_preds, labels)
if self.cfg.ssl_args.coef > 0:
# get target values
if teacher_model is not None: # get target values from teacher model
t_forward_func = teacher_model.forward
tea_logits = t_forward_func(all_data)
tea_unlabeled_weak_logits, _ = torch.chunk(tea_logits[labels.shape[0]:], 2, dim=0)
else:
t_forward_func = forward_func
tea_unlabeled_weak_logits = stu_unlabeled_weak_logits
# calc consistency loss
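            # Batch-statistics updates are disabled while the SSL algorithm runs its extra forward passes and re-enabled afterwards.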
model.update_batch_stats(False)
y, targets, mask = ssl_alg(
stu_preds=stu_unlabeled_strong_logits,
tea_logits=tea_unlabeled_weak_logits.detach(),
w_data=ul_strong_data,
subset=False,
stu_forward=forward_func,
tea_forward=t_forward_func
)
model.update_batch_stats(True)
# if not ood:
# model.update_batch_stats(True)
if weights is None:
L_consistency = consistency(y, targets, mask, weak_prediction=tea_unlabeled_weak_logits.softmax(1))
else:
L_consistency = consistency(y, targets, mask * weights,
weak_prediction=tea_unlabeled_weak_logits.softmax(1))
else:
L_consistency = torch.zeros_like(L_supervised)
mask = None
# calc total loss
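        # The consistency term is ramped in via an exponential warm-up of the SSL coefficient.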
coef = scheduler.exp_warmup(self.cfg.ssl_args.coef, int(self.cfg.scheduler.warmup_iter), cur_iteration + 1)
loss = L_supervised + coef * L_consistency
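        # Optional entropy-minimization term on the weakly-augmented unlabeled predictions.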
if self.cfg.ssl_args.em > 0:
loss -= self.cfg.ssl_args.em * \
(stu_unlabeled_weak_logits.softmax(1) * F.log_softmax(stu_unlabeled_weak_logits, 1)).sum(1).mean()
# update parameters
cur_lr = optimizer.param_groups[0]["lr"]
optimizer.zero_grad()
loss.backward()
if self.cfg.optimizer.weight_decay > 0:
decay_coeff = self.cfg.optimizer.weight_decay * cur_lr
model_utils.apply_weight_decay(model.modules(), decay_coeff)
optimizer.step()
# update teacher parameters by exponential moving average
if self.cfg.ssl_args.ema_teacher:
model_utils.ema_update(
teacher_model, model, self.cfg.ssl_args.ema_teacher_factor,
self.cfg.optimizer.weight_decay * cur_lr if self.cfg.ssl_args.ema_apply_wd else None,
cur_iteration if self.cfg.ssl_args.ema_teacher_warmup else None)
# update evaluation model's parameters by exponential moving average
if self.cfg.ssl_eval_args.weight_average:
model_utils.ema_update(
average_model, model, self.cfg.ssl_eval_args.wa_ema_factor,
self.cfg.optimizer.weight_decay * cur_lr if self.cfg.ssl_eval_args.wa_apply_wd else None)
# calculate accuracy for labeled data
acc = (labeled_preds.max(1)[1] == labels).float().mean()
return {
"acc": acc,
"loss": loss.item(),
"sup loss": L_supervised.item(),
"ssl loss": L_consistency.item(),
"mask": mask.float().mean().item() if mask is not None else 1,
"coef": coef,
"sec/iter": (time.time() - start_time)
}
"""
############################## Calculate selected ID points percentage ##############################
"""
def get_ul_ood_ratio(self, ul_dataset):
actual_lbls = ul_dataset.dataset.dataset['labels'][ul_dataset.indices]
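        # Labels 0-5 are the in-distribution classes in this OOD setup, so bincnt[:6] counts the selected ID points.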
bincnt = numpy.bincount(actual_lbls, minlength=10)
self.logger.info("Ratio of ID points selected: {0:f}".format((bincnt[:6].sum() / bincnt.sum()).item()))
"""
############################## Calculate selected ID points percentage ##############################
"""
def get_ul_classimb_ratio(self, ul_dataset):
actual_lbls = ul_dataset.dataset.dataset['labels'][ul_dataset.indices]
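        # Labels 0-4 are the under-represented classes in the class-imbalance setup, so bincnt[:5] counts points selected from them.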
bincnt = numpy.bincount(actual_lbls, minlength=10)
self.logger.info("Ratio of points selected from under-represented classes: {0:f}".format(
(bincnt[:5].sum() / bincnt.sum()).item()))
"""
############################## Main File ##############################
"""
def train(self):
logger = self.logger
# set seed
torch.manual_seed(self.cfg.train_args.seed)
numpy.random.seed(self.cfg.train_args.seed)
random.seed(self.cfg.train_args.seed)
device = self.cfg.train_args.device
# build data loader
logger.info("load dataset")
lt_data, ult_data, test_data, num_classes, img_size = gen_dataset(self.cfg.dataset.root, self.cfg.dataset.name,
False, self.cfg, logger)
# set consistency type
consistency = gen_consistency(self.cfg.ssl_args.consis, self.cfg)
consistency_nored = gen_consistency(self.cfg.ssl_args.consis + '_red', self.cfg)
# set ssl algorithm
ssl_alg = gen_ssl_alg(self.cfg.ssl_args.alg, self.cfg)
# build student model
model = self.gen_model(self.cfg.model.architecture, num_classes, img_size).to(device)
# build teacher model
if self.cfg.ssl_args.ema_teacher:
teacher_model = self.gen_model(self.cfg.model.architecture, num_classes, img_size).to(device)
teacher_model.load_state_dict(model.state_dict())
else:
teacher_model = None
# for evaluation
if self.cfg.ssl_eval_args.weight_average:
average_model = self.gen_model(self.cfg.model.architecture, num_classes, img_size).to(device)
average_model.load_state_dict(model.state_dict())
else:
average_model = None
"""
Subset selection arguments
"""
if self.cfg.dss_args.type == 'Full':
max_iteration = self.cfg.train_args.iteration
else:
if self.cfg.train_args.max_iter != -1:
max_iteration = self.cfg.train_args.max_iter
else:
max_iteration = int(self.cfg.train_args.iteration * self.cfg.dss_args.fraction)
# Create Data Loaders
ult_seq_loader = DataLoader(ult_data, batch_size=self.cfg.dataloader.ul_batch_size,
shuffle=False, pin_memory=True)
lt_seq_loader = DataLoader(lt_data, batch_size=self.cfg.dataloader.l_batch_size,
shuffle=False, pin_memory=True)
test_loader = DataLoader(
test_data,
1,
shuffle=False,
drop_last=False,
num_workers=self.cfg.dataloader.num_workers
)
"""
############################## Custom Dataloader Creation ##############################
"""
if self.cfg.dss_args.type in ['GradMatch', 'GradMatchPB', 'GradMatch-Warm', 'GradMatchPB-Warm']:
"""
############################## GradMatch Dataloader Additional Arguments ##############################
"""
self.cfg.dss_args.model = model
self.cfg.dss_args.tea_model = teacher_model
self.cfg.dss_args.ssl_alg = ssl_alg
self.cfg.dss_args.loss = consistency_nored
self.cfg.dss_args.num_classes = num_classes
self.cfg.dss_args.num_iters = self.cfg.train_args.iteration
self.cfg.dss_args.eta = self.cfg.optimizer.lr
self.cfg.dss_args.device = self.cfg.train_args.device
ult_loader = GradMatchDataLoader(ult_seq_loader, lt_seq_loader, self.cfg.dss_args, logger=logger,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
elif self.cfg.dss_args.type in ['RETRIEVE', 'RETRIEVE-Warm', 'RETRIEVEPB', 'RETRIEVEPB-Warm']:
"""
############################## RETRIEVE Dataloader Additional Arguments ##############################
"""
self.cfg.dss_args.model = model
self.cfg.dss_args.tea_model = teacher_model
self.cfg.dss_args.ssl_alg = ssl_alg
self.cfg.dss_args.loss = consistency_nored
self.cfg.dss_args.num_classes = num_classes
self.cfg.dss_args.num_iters = max_iteration
self.cfg.dss_args.eta = self.cfg.optimizer.lr
self.cfg.dss_args.device = self.cfg.train_args.device
ult_loader = RETRIEVEDataLoader(ult_seq_loader, lt_seq_loader, self.cfg.dss_args, logger=logger,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
elif self.cfg.dss_args.type in ['CRAIG', 'CRAIG-Warm', 'CRAIGPB', 'CRAIGPB-Warm']:
"""
############################## CRAIG Dataloader Additional Arguments ##############################
"""
self.cfg.dss_args.model = model
self.cfg.dss_args.tea_model = teacher_model
self.cfg.dss_args.ssl_alg = ssl_alg
self.cfg.dss_args.loss = consistency_nored
self.cfg.dss_args.num_classes = num_classes
self.cfg.dss_args.num_iters = max_iteration
self.cfg.dss_args.device = self.cfg.train_args.device
ult_loader = CRAIGDataLoader(ult_seq_loader, lt_seq_loader, self.cfg.dss_args, logger=logger,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
elif self.cfg.dss_args.type in ['Random', 'Random-Warm']:
"""
############################## Random Dataloader Additional Arguments ##############################
"""
self.cfg.dss_args.device = self.cfg.train_args.device
self.cfg.dss_args.num_classes = num_classes
self.cfg.dss_args.num_iters = max_iteration
self.cfg.dss_args.device = self.cfg.train_args.device
ult_loader = RandomDataLoader(ult_seq_loader, self.cfg.dss_args, logger=logger,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
        elif self.cfg.dss_args.type in ['OLRandom', 'OLRandom-Warm']:
"""
############################## OLRandom Dataloader Additional Arguments ##############################
"""
self.cfg.dss_args.device = self.cfg.train_args.device
self.cfg.dss_args.num_classes = num_classes
self.cfg.dss_args.num_iters = max_iteration
self.cfg.dss_args.device = self.cfg.train_args.device
ult_loader = OLRandomDataLoader(ult_seq_loader, self.cfg.dss_args, logger=logger,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
elif self.cfg.dss_args.type == 'Full':
"""
############################## Full Dataloader Additional Arguments ##############################
"""
wt_trainset = WeightedSubset(ult_data, list(range(len(ult_data))), [1] * len(ult_data))
ult_loader = torch.utils.data.DataLoader(wt_trainset,
batch_size=self.cfg.dataloader.ul_batch_size,
pin_memory=self.cfg.dataloader.pin_memory,
num_workers=self.cfg.dataloader.num_workers)
model.train()
logger.info(model)
if self.cfg.optimizer.type == "sgd":
optimizer = optim.SGD(
model.parameters(), self.cfg.optimizer.lr, self.cfg.optimizer.momentum, weight_decay=0, nesterov=True)
elif self.cfg.optimizer.type == "adam":
optimizer = optim.Adam(
                model.parameters(), self.cfg.optimizer.lr, (self.cfg.optimizer.momentum, 0.999), weight_decay=0
)
else:
raise NotImplementedError
# set lr scheduler
if self.cfg.scheduler.lr_decay == "cos":
if self.cfg.dss_args.type == 'Full':
lr_scheduler = scheduler.CosineAnnealingLR(optimizer, max_iteration)
else:
lr_scheduler = scheduler.CosineAnnealingLR(optimizer,
self.cfg.train_args.iteration * self.cfg.dss_args.fraction)
elif self.cfg.scheduler.lr_decay == "step":
# TODO: fixed milestones
lr_scheduler = optim.lr_scheduler.MultiStepLR(optimizer, [400000, ], self.cfg.scheduler.lr_decay_rate)
else:
raise NotImplementedError
# init meter
metric_meter = Meter()
test_acc_list = []
raw_acc_list = []
logger.info("training")
if self.cfg.dataset.feature == 'ood':
self.get_ul_ood_ratio(ult_loader.dataset)
elif self.cfg.dataset.feature == 'classimb':
self.get_ul_classimb_ratio(ult_loader.dataset)
iter_count = 1
subset_selection_time = 0
training_time = 0
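        # Outer loop: the labeled loader uses an infinite sampler sized so that one labeled batch is drawn per unlabeled batch yielded by ult_loader.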
while iter_count <= max_iteration:
lt_loader = DataLoader(
lt_data,
self.cfg.dataloader.l_batch_size,
sampler=dataset_utils.InfiniteSampler(len(lt_data), len(list(
ult_loader.batch_sampler)) * self.cfg.dataloader.l_batch_size),
num_workers=self.cfg.dataloader.num_workers
)
logger.debug("Data loader iteration count is: {0:d}".format(len(list(ult_loader.batch_sampler))))
for batch_idx, (l_data, ul_data) in enumerate(zip(lt_loader, ult_loader)):
batch_start_time = time.time()
if iter_count > max_iteration:
break
l_aug, labels = l_data
ul_w_aug, ul_s_aug, _, weights = ul_data
if self.cfg.dataset.feature in ['ood', 'classimb']:
ood = True
else:
ood = False
params = self.param_update(
iter_count, model, teacher_model, optimizer, ssl_alg,
consistency, l_aug.to(device), ul_w_aug.to(device),
ul_s_aug.to(device), labels.to(device),
average_model, weights=weights.to(device), ood=ood)
training_time += (time.time() - batch_start_time)
# moving average for reporting losses and accuracy
metric_meter.add(params, ignores=["coef"])
# display losses every cfg.disp iterations
if ((iter_count + 1) % self.cfg.train_args.disp) == 0:
state = metric_meter.state(
header=f'[{iter_count + 1}/{max_iteration}]',
footer=f'ssl coef {params["coef"]:.4g} | lr {optimizer.param_groups[0]["lr"]:.4g}'
)
logger.info(state)
lr_scheduler.step()
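                # Periodically evaluate the raw model and the (optionally weight-averaged) evaluation model, then checkpoint model and optimizer.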
if ((iter_count + 1) % self.cfg.ckpt.checkpoint) == 0 or (iter_count + 1) == max_iteration:
with torch.no_grad():
if self.cfg.ssl_eval_args.weight_average:
eval_model = average_model
else:
eval_model = model
logger.info("test")
mean_raw_acc, mean_test_acc, mean_test_loss = self.evaluation(model, eval_model, test_loader,
device)
logger.info("test loss %f | test acc. %f | raw acc. %f", mean_test_loss, mean_test_acc,
mean_raw_acc)
test_acc_list.append(mean_test_acc)
raw_acc_list.append(mean_raw_acc)
torch.save(model.state_dict(), os.path.join(self.cfg.train_args.out_dir, "model_checkpoint.pth"))
torch.save(optimizer.state_dict(),
os.path.join(self.cfg.train_args.out_dir, "optimizer_checkpoint.pth"))
iter_count += 1
numpy.save(os.path.join(self.cfg.train_args.out_dir, "results"), test_acc_list)
numpy.save(os.path.join(self.cfg.train_args.out_dir, "raw_results"), raw_acc_list)
logger.info("Total Time taken: %f", training_time + subset_selection_time)
logger.info("Subset Selection Time: %f", subset_selection_time)
accuracies = {}
for i in [1, 10, 20, 50]:
logger.info("mean test acc. over last %d checkpoints: %f", i, numpy.median(test_acc_list[-i:]))
logger.info("mean test acc. for raw model over last %d checkpoints: %f", i, numpy.median(raw_acc_list[-i:]))
accuracies[f"last{i}"] = numpy.median(test_acc_list[-i:])
with open(os.path.join(self.cfg.train_args.out_dir, "results.json"), "w") as f:
json.dump(accuracies, f, sort_keys=True)
if __name__ == "__main__":
torch.multiprocessing.freeze_support()
|
[
"numpy.random.seed",
"torch.cat",
"cords.selectionstrategies.helpers.ssl_lib.algs.builder.gen_ssl_alg",
"logging.Formatter",
"torch.no_grad",
"os.path.join",
"cords.utils.models.CNN13",
"cords.utils.config_utils.load_config_data",
"torch.utils.data.DataLoader",
"cords.utils.models.CNN",
"torch.multiprocessing.freeze_support",
"random.seed",
"torch.nn.functional.log_softmax",
"numpy.bincount",
"json.dump",
"cords.selectionstrategies.helpers.ssl_lib.misc.meter.Meter",
"torch.zeros_like",
"numpy.median",
"torch.manual_seed",
"numpy.log2",
"logging.StreamHandler",
"cords.utils.models.ShakeNet",
"torch.nn.functional.cross_entropy",
"cords.utils.models.utils.ema_update",
"cords.selectionstrategies.helpers.ssl_lib.consistency.builder.gen_consistency",
"os.makedirs",
"cords.utils.models.WideResNet",
"cords.selectionstrategies.helpers.ssl_lib.param_scheduler.scheduler.CosineAnnealingLR",
"cords.utils.data.datasets.SSL.gen_dataset",
"time.time",
"cords.selectionstrategies.helpers.ssl_lib.algs.utils.anneal_loss",
"torch.chunk",
"os.path.expanduser",
"logging.getLogger",
"torch.optim.lr_scheduler.MultiStepLR"
] |
[((23483, 23521), 'torch.multiprocessing.freeze_support', 'torch.multiprocessing.freeze_support', ([], {}), '()\n', (23519, 23521), False, 'import torch\n'), ((1166, 1200), 'cords.utils.config_utils.load_config_data', 'load_config_data', (['self.config_file'], {}), '(self.config_file)\n', (1182, 1200), False, 'from cords.utils.config_utils import load_config_data\n'), ((1621, 1661), 'os.makedirs', 'os.makedirs', (['all_logs_dir'], {'exist_ok': '(True)'}), '(all_logs_dir, exist_ok=True)\n', (1632, 1661), False, 'import os\n'), ((1711, 1811), 'logging.Formatter', 'logging.Formatter', (['"""[%(asctime)s] %(name)s %(levelname)s: %(message)s"""'], {'datefmt': '"""%m/%d %H:%M:%S"""'}), "('[%(asctime)s] %(name)s %(levelname)s: %(message)s',\n datefmt='%m/%d %H:%M:%S')\n", (1728, 1811), False, 'import logging\n'), ((1874, 1901), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1891, 1901), False, 'import logging\n'), ((1965, 2005), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (1986, 2005), False, 'import logging\n'), ((4725, 4736), 'time.time', 'time.time', ([], {}), '()\n', (4734, 4736), False, 'import time\n'), ((4756, 4814), 'torch.cat', 'torch.cat', (['[labeled_data, ul_weak_data, ul_strong_data]', '(0)'], {}), '([labeled_data, ul_weak_data, ul_strong_data], 0)\n', (4765, 4814), False, 'import torch\n'), ((5021, 5072), 'torch.chunk', 'torch.chunk', (['stu_logits[labels.shape[0]:]', '(2)'], {'dim': '(0)'}), '(stu_logits[labels.shape[0]:], 2, dim=0)\n', (5032, 5072), False, 'import torch\n'), ((9157, 9198), 'numpy.bincount', 'numpy.bincount', (['actual_lbls'], {'minlength': '(10)'}), '(actual_lbls, minlength=10)\n', (9171, 9198), False, 'import numpy, random, time, json, copy\n'), ((9581, 9622), 'numpy.bincount', 'numpy.bincount', (['actual_lbls'], {'minlength': '(10)'}), '(actual_lbls, minlength=10)\n', (9595, 9622), False, 'import numpy, random, time, json, copy\n'), ((9947, 9990), 'torch.manual_seed', 'torch.manual_seed', (['self.cfg.train_args.seed'], {}), '(self.cfg.train_args.seed)\n', (9964, 9990), False, 'import torch\n'), ((9999, 10042), 'numpy.random.seed', 'numpy.random.seed', (['self.cfg.train_args.seed'], {}), '(self.cfg.train_args.seed)\n', (10016, 10042), False, 'import numpy, random, time, json, copy\n'), ((10051, 10088), 'random.seed', 'random.seed', (['self.cfg.train_args.seed'], {}), '(self.cfg.train_args.seed)\n', (10062, 10088), False, 'import numpy, random, time, json, copy\n'), ((10259, 10345), 'cords.utils.data.datasets.SSL.gen_dataset', 'gen_dataset', (['self.cfg.dataset.root', 'self.cfg.dataset.name', '(False)', 'self.cfg', 'logger'], {}), '(self.cfg.dataset.root, self.cfg.dataset.name, False, self.cfg,\n logger)\n', (10270, 10345), False, 'from cords.utils.data.datasets.SSL import gen_dataset\n'), ((10469, 10520), 'cords.selectionstrategies.helpers.ssl_lib.consistency.builder.gen_consistency', 'gen_consistency', (['self.cfg.ssl_args.consis', 'self.cfg'], {}), '(self.cfg.ssl_args.consis, self.cfg)\n', (10484, 10520), False, 'from cords.selectionstrategies.helpers.ssl_lib.consistency.builder import gen_consistency\n'), ((10549, 10609), 'cords.selectionstrategies.helpers.ssl_lib.consistency.builder.gen_consistency', 'gen_consistency', (["(self.cfg.ssl_args.consis + '_red')", 'self.cfg'], {}), "(self.cfg.ssl_args.consis + '_red', self.cfg)\n", (10564, 10609), False, 'from cords.selectionstrategies.helpers.ssl_lib.consistency.builder import gen_consistency\n'), ((10656, 10700), 
'cords.selectionstrategies.helpers.ssl_lib.algs.builder.gen_ssl_alg', 'gen_ssl_alg', (['self.cfg.ssl_args.alg', 'self.cfg'], {}), '(self.cfg.ssl_args.alg, self.cfg)\n', (10667, 10700), False, 'from cords.selectionstrategies.helpers.ssl_lib.algs.builder import gen_ssl_alg\n'), ((11861, 11964), 'torch.utils.data.DataLoader', 'DataLoader', (['ult_data'], {'batch_size': 'self.cfg.dataloader.ul_batch_size', 'shuffle': '(False)', 'pin_memory': '(True)'}), '(ult_data, batch_size=self.cfg.dataloader.ul_batch_size, shuffle=\n False, pin_memory=True)\n', (11871, 11964), False, 'from torch.utils.data import DataLoader, Subset\n'), ((12021, 12122), 'torch.utils.data.DataLoader', 'DataLoader', (['lt_data'], {'batch_size': 'self.cfg.dataloader.l_batch_size', 'shuffle': '(False)', 'pin_memory': '(True)'}), '(lt_data, batch_size=self.cfg.dataloader.l_batch_size, shuffle=\n False, pin_memory=True)\n', (12031, 12122), False, 'from torch.utils.data import DataLoader, Subset\n'), ((12176, 12282), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data', '(1)'], {'shuffle': '(False)', 'drop_last': '(False)', 'num_workers': 'self.cfg.dataloader.num_workers'}), '(test_data, 1, shuffle=False, drop_last=False, num_workers=self.\n cfg.dataloader.num_workers)\n', (12186, 12282), False, 'from torch.utils.data import DataLoader, Subset\n'), ((19112, 19119), 'cords.selectionstrategies.helpers.ssl_lib.misc.meter.Meter', 'Meter', ([], {}), '()\n', (19117, 19119), False, 'from cords.selectionstrategies.helpers.ssl_lib.misc.meter import Meter\n'), ((1235, 1282), 'os.path.expanduser', 'osp.expanduser', (['self.cfg.train_args.results_dir'], {}), '(self.cfg.train_args.results_dir)\n', (1249, 1282), True, 'import os.path as osp\n'), ((2177, 2235), 'os.path.join', 'os.path.join', (['all_logs_dir', "(self.cfg.dataset.name + '.log')"], {}), "(all_logs_dir, self.cfg.dataset.name + '.log')\n", (2189, 2235), False, 'import os\n'), ((2688, 2725), 'cords.utils.models.WideResNet', 'WideResNet', (['num_classes', '(32)', 'scale', '(4)'], {}), '(num_classes, 32, scale, 4)\n', (2698, 2725), False, 'from cords.utils.models import WideResNet, ShakeNet, CNN13, CNN\n'), ((3286, 3301), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3299, 3301), False, 'import torch\n'), ((5141, 5197), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['labeled_preds', 'labels'], {'reduction': '"""none"""'}), "(labeled_preds, labels, reduction='none')\n", (5156, 5197), True, 'import torch.nn.functional as F\n'), ((5225, 5405), 'cords.selectionstrategies.helpers.ssl_lib.algs.utils.anneal_loss', 'alg_utils.anneal_loss', (['labeled_preds', 'labels', 'none_reduced_loss', '(cur_iteration + 1)', 'self.cfg.train_args.iteration', 'labeled_preds.shape[1]', 'self.cfg.optimizer.tsa_schedule'], {}), '(labeled_preds, labels, none_reduced_loss, \n cur_iteration + 1, self.cfg.train_args.iteration, labeled_preds.shape[1\n ], self.cfg.optimizer.tsa_schedule)\n', (5246, 5405), True, 'from cords.selectionstrategies.helpers.ssl_lib.algs import utils as alg_utils\n'), ((5470, 5508), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['labeled_preds', 'labels'], {}), '(labeled_preds, labels)\n', (5485, 5508), True, 'import torch.nn.functional as F\n'), ((6908, 6938), 'torch.zeros_like', 'torch.zeros_like', (['L_supervised'], {}), '(L_supervised)\n', (6924, 6938), False, 'import torch\n'), ((7824, 8072), 'cords.utils.models.utils.ema_update', 'model_utils.ema_update', (['teacher_model', 'model', 'self.cfg.ssl_args.ema_teacher_factor', 
'(self.cfg.optimizer.weight_decay * cur_lr if self.cfg.ssl_args.ema_apply_wd\n else None)', '(cur_iteration if self.cfg.ssl_args.ema_teacher_warmup else None)'], {}), '(teacher_model, model, self.cfg.ssl_args.\n ema_teacher_factor, self.cfg.optimizer.weight_decay * cur_lr if self.\n cfg.ssl_args.ema_apply_wd else None, cur_iteration if self.cfg.ssl_args\n .ema_teacher_warmup else None)\n', (7846, 8072), True, 'from cords.utils.models import utils as model_utils\n'), ((8246, 8428), 'cords.utils.models.utils.ema_update', 'model_utils.ema_update', (['average_model', 'model', 'self.cfg.ssl_eval_args.wa_ema_factor', '(self.cfg.optimizer.weight_decay * cur_lr if self.cfg.ssl_eval_args.\n wa_apply_wd else None)'], {}), '(average_model, model, self.cfg.ssl_eval_args.\n wa_ema_factor, self.cfg.optimizer.weight_decay * cur_lr if self.cfg.\n ssl_eval_args.wa_apply_wd else None)\n', (8268, 8428), True, 'from cords.utils.models import utils as model_utils\n'), ((22637, 22689), 'os.path.join', 'os.path.join', (['self.cfg.train_args.out_dir', '"""results"""'], {}), "(self.cfg.train_args.out_dir, 'results')\n", (22649, 22689), False, 'import os\n'), ((22725, 22781), 'os.path.join', 'os.path.join', (['self.cfg.train_args.out_dir', '"""raw_results"""'], {}), "(self.cfg.train_args.out_dir, 'raw_results')\n", (22737, 22781), False, 'import os\n'), ((23276, 23308), 'numpy.median', 'numpy.median', (['test_acc_list[-i:]'], {}), '(test_acc_list[-i:])\n', (23288, 23308), False, 'import numpy, random, time, json, copy\n'), ((23410, 23450), 'json.dump', 'json.dump', (['accuracies', 'f'], {'sort_keys': '(True)'}), '(accuracies, f, sort_keys=True)\n', (23419, 23450), False, 'import numpy, random, time, json, copy\n'), ((2623, 2640), 'numpy.log2', 'np.log2', (['img_size'], {}), '(img_size)\n', (2630, 2640), True, 'import numpy as np\n'), ((2775, 2810), 'cords.utils.models.ShakeNet', 'ShakeNet', (['num_classes', '(32)', 'scale', '(4)'], {}), '(num_classes, 32, scale, 4)\n', (2783, 2810), False, 'from cords.utils.models import WideResNet, ShakeNet, CNN13, CNN\n'), ((3519, 3549), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['preds', 'labels'], {}), '(preds, labels)\n', (3534, 3549), True, 'import torch.nn.functional as F\n'), ((5819, 5870), 'torch.chunk', 'torch.chunk', (['tea_logits[labels.shape[0]:]', '(2)'], {'dim': '(0)'}), '(tea_logits[labels.shape[0]:], 2, dim=0)\n', (5830, 5870), False, 'import torch\n'), ((8856, 8867), 'time.time', 'time.time', ([], {}), '()\n', (8865, 8867), False, 'import time\n'), ((18550, 18603), 'cords.selectionstrategies.helpers.ssl_lib.param_scheduler.scheduler.CosineAnnealingLR', 'scheduler.CosineAnnealingLR', (['optimizer', 'max_iteration'], {}), '(optimizer, max_iteration)\n', (18577, 18603), False, 'from cords.selectionstrategies.helpers.ssl_lib.param_scheduler import scheduler\n'), ((18653, 18756), 'cords.selectionstrategies.helpers.ssl_lib.param_scheduler.scheduler.CosineAnnealingLR', 'scheduler.CosineAnnealingLR', (['optimizer', '(self.cfg.train_args.iteration * self.cfg.dss_args.fraction)'], {}), '(optimizer, self.cfg.train_args.iteration * self\n .cfg.dss_args.fraction)\n', (18680, 18756), False, 'from cords.selectionstrategies.helpers.ssl_lib.param_scheduler import scheduler\n'), ((18927, 19017), 'torch.optim.lr_scheduler.MultiStepLR', 'optim.lr_scheduler.MultiStepLR', (['optimizer', '[400000]', 'self.cfg.scheduler.lr_decay_rate'], {}), '(optimizer, [400000], self.cfg.scheduler.\n lr_decay_rate)\n', (18957, 19017), True, 'import torch.optim as optim\n'), ((20126, 
20137), 'time.time', 'time.time', ([], {}), '()\n', (20135, 20137), False, 'import time\n'), ((23084, 23116), 'numpy.median', 'numpy.median', (['test_acc_list[-i:]'], {}), '(test_acc_list[-i:])\n', (23096, 23116), False, 'import numpy, random, time, json, copy\n'), ((23206, 23237), 'numpy.median', 'numpy.median', (['raw_acc_list[-i:]'], {}), '(raw_acc_list[-i:])\n', (23218, 23237), False, 'import numpy, random, time, json, copy\n'), ((23328, 23385), 'os.path.join', 'os.path.join', (['self.cfg.train_args.out_dir', '"""results.json"""'], {}), "(self.cfg.train_args.out_dir, 'results.json')\n", (23340, 23385), False, 'import os\n'), ((2860, 2882), 'cords.utils.models.CNN13', 'CNN13', (['num_classes', '(32)'], {}), '(num_classes, 32)\n', (2865, 2882), False, 'from cords.utils.models import WideResNet, ShakeNet, CNN13, CNN\n'), ((20816, 20827), 'time.time', 'time.time', ([], {}), '()\n', (20825, 20827), False, 'import time\n'), ((2930, 2946), 'cords.utils.models.CNN', 'CNN', (['num_classes'], {}), '(num_classes)\n', (2933, 2946), False, 'from cords.utils.models import WideResNet, ShakeNet, CNN13, CNN\n'), ((21559, 21574), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (21572, 21574), False, 'import torch\n'), ((22361, 22426), 'os.path.join', 'os.path.join', (['self.cfg.train_args.out_dir', '"""model_checkpoint.pth"""'], {}), "(self.cfg.train_args.out_dir, 'model_checkpoint.pth')\n", (22373, 22426), False, 'import os\n'), ((22514, 22583), 'os.path.join', 'os.path.join', (['self.cfg.train_args.out_dir', '"""optimizer_checkpoint.pth"""'], {}), "(self.cfg.train_args.out_dir, 'optimizer_checkpoint.pth')\n", (22526, 22583), False, 'import os\n'), ((17556, 17740), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['wt_trainset'], {'batch_size': 'self.cfg.dataloader.ul_batch_size', 'pin_memory': 'self.cfg.dataloader.pin_memory', 'num_workers': 'self.cfg.dataloader.num_workers'}), '(wt_trainset, batch_size=self.cfg.dataloader.\n ul_batch_size, pin_memory=self.cfg.dataloader.pin_memory, num_workers=\n self.cfg.dataloader.num_workers)\n', (17583, 17740), False, 'import torch\n'), ((7299, 7342), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['stu_unlabeled_weak_logits', '(1)'], {}), '(stu_unlabeled_weak_logits, 1)\n', (7312, 7342), True, 'import torch.nn.functional as F\n')]
|
from copy import copy
import numpy as np
import pytest
import aesara
from aesara import tensor as at
from aesara.scan.utils import ScanArgs
@pytest.fixture(scope="module", autouse=True)
def set_aesara_flags():
with aesara.config.change_flags(cxx="", mode="FAST_COMPILE"):
yield
def create_test_hmm():
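    """Build a small HMM-like Scan graph and return its locals() for use in the tests below."""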
rng_state = np.random.default_rng(23422)
rng_tt = aesara.shared(rng_state, name="rng", borrow=True)
rng_tt.tag.is_rng = True
rng_tt.default_update = rng_tt
N_tt = at.iscalar("N")
N_tt.tag.test_value = 10
M_tt = at.iscalar("M")
M_tt.tag.test_value = 2
mus_tt = at.matrix("mus")
mus_tt.tag.test_value = np.stack(
[np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1
).astype(aesara.config.floatX)
sigmas_tt = at.ones((N_tt,))
sigmas_tt.name = "sigmas"
pi_0_rv = at.random.dirichlet(at.ones((M_tt,)), rng=rng_tt, name="pi_0")
Gamma_rv = at.random.dirichlet(at.ones((M_tt, M_tt)), rng=rng_tt, name="Gamma")
S_0_rv = at.random.categorical(pi_0_rv, rng=rng_tt, name="S_0")
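    # One step of the chain: sample the next hidden state from the transition row of the previous state, then an observation from that state's normal emission.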
def scan_fn(mus_t, sigma_t, S_tm1, Gamma_t, rng):
S_t = at.random.categorical(Gamma_t[S_tm1], rng=rng, name="S_t")
Y_t = at.random.normal(mus_t[S_t], sigma_t, rng=rng, name="Y_t")
return S_t, Y_t
(S_rv, Y_rv), scan_updates = aesara.scan(
fn=scan_fn,
sequences=[mus_tt, sigmas_tt],
non_sequences=[Gamma_rv, rng_tt],
outputs_info=[{"initial": S_0_rv, "taps": [-1]}, {}],
strict=True,
name="scan_rv",
)
Y_rv.name = "Y_rv"
scan_op = Y_rv.owner.op
scan_args = ScanArgs.from_node(Y_rv.owner)
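    # Collect the inner-/outer-graph variables that the tests below retrieve by name from the returned locals().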
Gamma_in = scan_args.inner_in_non_seqs[0]
Y_t = scan_args.inner_out_nit_sot[0]
mus_t = scan_args.inner_in_seqs[0]
sigmas_t = scan_args.inner_in_seqs[1]
S_t = scan_args.inner_out_sit_sot[0]
rng_in = scan_args.inner_out_shared[0]
rng_updates = scan_updates[rng_tt]
rng_updates.name = "rng_updates"
mus_in = Y_rv.owner.inputs[1]
mus_in.name = "mus_in"
sigmas_in = Y_rv.owner.inputs[2]
sigmas_in.name = "sigmas_in"
# The output `S_rv` is really `S_rv[1:]`, so we have to extract the actual
# `Scan` output: `S_rv`.
S_in = S_rv.owner.inputs[0]
S_in.name = "S_in"
return locals()
def test_ScanArgs():
# Make sure we can create an empty `ScanArgs`
scan_args = ScanArgs.create_empty()
assert scan_args.n_steps is None
for name in scan_args.field_names:
if name == "n_steps":
continue
assert len(getattr(scan_args, name)) == 0
with pytest.raises(TypeError):
ScanArgs.from_node(at.ones(2).owner)
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
scan_op = hmm_model_env["scan_op"]
# Make sure we can get alternate variables
test_v = scan_args.outer_out_sit_sot[0]
alt_test_v = scan_args.get_alt_field(test_v, "inner_out")
assert alt_test_v == scan_args.inner_out_sit_sot[0]
alt_test_v = scan_args.get_alt_field(test_v, "outer_in")
assert alt_test_v == scan_args.outer_in_sit_sot[0]
# Check the `__repr__` and `__str__`
scan_args_repr = repr(scan_args)
# Just make sure it doesn't err-out
assert scan_args_repr.startswith("ScanArgs")
# Check the properties that allow us to use
# `Scan.get_oinp_iinp_iout_oout_mappings` as-is to implement
# `ScanArgs.var_mappings`
assert scan_args.n_nit_sot == scan_op.n_nit_sot
assert scan_args.n_mit_mot == scan_op.n_mit_mot
# The `scan_args` base class always clones the inner-graph;
# here we make sure it doesn't (and that all the inputs are the same)
assert scan_args.inputs == scan_op.inputs
assert scan_args.info == scan_op.info
# Check that `ScanArgs.find_among_fields` works
test_v = scan_op.inner_seqs(scan_op.inputs)[1]
field_info = scan_args.find_among_fields(test_v)
assert field_info.name == "inner_in_seqs"
assert field_info.index == 1
assert field_info.inner_index is None
assert scan_args.inner_inputs[field_info.agg_index] == test_v
test_l = scan_op.inner_non_seqs(scan_op.inputs)
# We didn't index this argument, so it's a `list` (i.e. bad input)
field_info = scan_args.find_among_fields(test_l)
assert field_info is None
test_v = test_l[0]
field_info = scan_args.find_among_fields(test_v)
assert field_info.name == "inner_in_non_seqs"
assert field_info.index == 0
assert field_info.inner_index is None
assert scan_args.inner_inputs[field_info.agg_index] == test_v
scan_args_copy = copy(scan_args)
assert scan_args_copy is not scan_args
assert scan_args_copy == scan_args
assert scan_args_copy != test_v
scan_args_copy.outer_in_seqs.pop()
assert scan_args_copy != scan_args
def test_ScanArgs_basics_mit_sot():
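    # Same HMM construction as create_test_hmm, but the state recurrence uses taps [-2, -1], making it a mit-sot instead of a sit-sot.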
rng_state = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(1234)))
rng_tt = aesara.shared(rng_state, name="rng", borrow=True)
rng_tt.tag.is_rng = True
rng_tt.default_update = rng_tt
N_tt = at.iscalar("N")
N_tt.tag.test_value = 10
M_tt = at.iscalar("M")
M_tt.tag.test_value = 2
mus_tt = at.matrix("mus")
mus_tt.tag.test_value = np.stack(
[np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1
).astype(aesara.config.floatX)
sigmas_tt = at.ones((N_tt,))
sigmas_tt.name = "sigmas"
pi_0_rv = at.random.dirichlet(at.ones((M_tt,)), rng=rng_tt, name="pi_0")
Gamma_rv = at.random.dirichlet(at.ones((M_tt, M_tt)), rng=rng_tt, name="Gamma")
S_0_rv = at.random.categorical(pi_0_rv, rng=rng_tt, name="S_0")
def scan_fn(mus_t, sigma_t, S_tm2, S_tm1, Gamma_t, rng):
S_t = at.random.categorical(Gamma_t[S_tm2], rng=rng, name="S_t")
Y_t = at.random.normal(mus_t[S_tm1], sigma_t, rng=rng, name="Y_t")
return S_t, Y_t
(S_rv, Y_rv), scan_updates = aesara.scan(
fn=scan_fn,
sequences=[mus_tt, sigmas_tt],
non_sequences=[Gamma_rv, rng_tt],
outputs_info=[{"initial": at.stack([S_0_rv, S_0_rv]), "taps": [-2, -1]}, {}],
strict=True,
name="scan_rv",
)
# Adding names should make output easier to read
Y_rv.name = "Y_rv"
# This `S_rv` outer-output is actually a `Subtensor` of the "real" output
S_rv = S_rv.owner.inputs[0]
S_rv.name = "S_rv"
rng_updates = scan_updates[rng_tt]
rng_updates.name = "rng_updates"
mus_in = Y_rv.owner.inputs[1]
mus_in.name = "mus_in"
sigmas_in = Y_rv.owner.inputs[2]
sigmas_in.name = "sigmas_in"
scan_args = ScanArgs.from_node(Y_rv.owner)
test_v = scan_args.inner_in_mit_sot[0][1]
field_info = scan_args.find_among_fields(test_v)
assert field_info.name == "inner_in_mit_sot"
assert field_info.index == 0
assert field_info.inner_index == 1
assert field_info.agg_index == 3
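    # A variable that is not part of the scan yields no removal info.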
rm_info = scan_args._remove_from_fields(at.ones(2))
assert rm_info is None
rm_info = scan_args._remove_from_fields(test_v)
assert rm_info.name == "inner_in_mit_sot"
assert rm_info.index == 0
assert rm_info.inner_index == 1
assert rm_info.agg_index == 3
def test_ScanArgs_remove_inner_input():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
Y_t = hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
sigmas_in = hmm_model_env["sigmas_in"]
sigmas_t = hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
hmm_model_env["S_rv"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Check `ScanArgs.remove_from_fields` by removing `sigmas[t]` (i.e. the
# inner-graph input)
scan_args_copy = copy(scan_args)
test_v = sigmas_t
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=False)
removed_nodes, _ = zip(*rm_info)
assert sigmas_t in removed_nodes
assert sigmas_t not in scan_args_copy.inner_in_seqs
assert Y_t not in removed_nodes
assert len(scan_args_copy.inner_out_nit_sot) == 1
scan_args_copy = copy(scan_args)
test_v = sigmas_t
# This removal includes dependents
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
# `sigmas[t]` (i.e. inner-graph input) should be gone
assert sigmas_t in removed_nodes
assert sigmas_t not in scan_args_copy.inner_in_seqs
# `Y_t` (i.e. inner-graph output) should be gone
assert Y_t in removed_nodes
assert len(scan_args_copy.inner_out_nit_sot) == 0
# `Y_rv` (i.e. outer-graph output) should be gone
assert Y_rv in removed_nodes
assert Y_rv not in scan_args_copy.outer_outputs
assert len(scan_args_copy.outer_out_nit_sot) == 0
# `sigmas_in` (i.e. outer-graph input) should be gone
assert sigmas_in in removed_nodes
assert test_v not in scan_args_copy.inner_in_seqs
# These shouldn't have been removed
assert S_t in scan_args_copy.inner_out_sit_sot
assert S_in in scan_args_copy.outer_out_sit_sot
assert Gamma_in in scan_args_copy.inner_in_non_seqs
assert Gamma_rv in scan_args_copy.outer_in_non_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
# The other `Y_rv`-related inputs currently aren't removed, even though
# they're no longer needed.
# TODO: Would be nice if we did this, too
# assert len(scan_args_copy.outer_in_seqs) == 0
# TODO: Would be nice if we did this, too
# assert len(scan_args_copy.inner_in_seqs) == 0
# We shouldn't be able to remove the removed node
with pytest.raises(ValueError):
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
def test_ScanArgs_remove_outer_input():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
Y_t = hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
sigmas_in = hmm_model_env["sigmas_in"]
sigmas_t = hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
hmm_model_env["S_rv"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `sigmas` (i.e. the outer-input)
scan_args_copy = copy(scan_args)
test_v = sigmas_in
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
# `sigmas_in` (i.e. outer-graph input) should be gone
assert scan_args.outer_in_seqs[-1] in removed_nodes
assert test_v not in scan_args_copy.inner_in_seqs
# `sigmas[t]` should be gone
assert sigmas_t in removed_nodes
assert sigmas_t not in scan_args_copy.inner_in_seqs
# `Y_t` (i.e. inner-graph output) should be gone
assert Y_t in removed_nodes
assert len(scan_args_copy.inner_out_nit_sot) == 0
# `Y_rv` (i.e. outer-graph output) should be gone
assert Y_rv not in scan_args_copy.outer_outputs
assert len(scan_args_copy.outer_out_nit_sot) == 0
assert S_t in scan_args_copy.inner_out_sit_sot
assert S_in in scan_args_copy.outer_out_sit_sot
assert Gamma_in in scan_args_copy.inner_in_non_seqs
assert Gamma_rv in scan_args_copy.outer_in_non_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
def test_ScanArgs_remove_inner_output():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
Y_t = hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
hmm_model_env["sigmas_in"]
hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
hmm_model_env["S_rv"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `Y_t` (i.e. the inner-output)
scan_args_copy = copy(scan_args)
test_v = Y_t
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
# `Y_t` (i.e. inner-graph output) should be gone
assert Y_t in removed_nodes
assert len(scan_args_copy.inner_out_nit_sot) == 0
# `Y_rv` (i.e. outer-graph output) should be gone
assert Y_rv not in scan_args_copy.outer_outputs
assert len(scan_args_copy.outer_out_nit_sot) == 0
assert S_t in scan_args_copy.inner_out_sit_sot
assert S_in in scan_args_copy.outer_out_sit_sot
assert Gamma_in in scan_args_copy.inner_in_non_seqs
assert Gamma_rv in scan_args_copy.outer_in_non_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
def test_ScanArgs_remove_outer_output():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
Y_t = hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
hmm_model_env["sigmas_in"]
hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `Y_rv` (i.e. a nit-sot outer-output)
scan_args_copy = copy(scan_args)
test_v = Y_rv
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
# `Y_t` (i.e. inner-graph output) should be gone
assert Y_t in removed_nodes
assert len(scan_args_copy.inner_out_nit_sot) == 0
# `Y_rv` (i.e. outer-graph output) should be gone
assert Y_rv not in scan_args_copy.outer_outputs
assert len(scan_args_copy.outer_out_nit_sot) == 0
assert S_t in scan_args_copy.inner_out_sit_sot
assert S_in in scan_args_copy.outer_out_sit_sot
assert Gamma_in in scan_args_copy.inner_in_non_seqs
assert Gamma_rv in scan_args_copy.outer_in_non_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
def test_ScanArgs_remove_nonseq_outer_input():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
Y_t = hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
mus_in = hmm_model_env["mus_in"]
mus_t = hmm_model_env["mus_t"]
sigmas_in = hmm_model_env["sigmas_in"]
sigmas_t = hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `Gamma` (i.e. a non-sequence outer-input)
scan_args_copy = copy(scan_args)
test_v = Gamma_rv
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
assert Gamma_rv in removed_nodes
assert Gamma_in in removed_nodes
assert S_in in removed_nodes
assert S_t in removed_nodes
assert Y_t in removed_nodes
assert Y_rv in removed_nodes
assert mus_in in scan_args_copy.outer_in_seqs
assert sigmas_in in scan_args_copy.outer_in_seqs
assert mus_t in scan_args_copy.inner_in_seqs
assert sigmas_t in scan_args_copy.inner_in_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
def test_ScanArgs_remove_nonseq_inner_input():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
hmm_model_env["Y_t"]
hmm_model_env["Y_rv"]
mus_in = hmm_model_env["mus_in"]
mus_t = hmm_model_env["mus_t"]
sigmas_in = hmm_model_env["sigmas_in"]
sigmas_t = hmm_model_env["sigmas_t"]
Gamma_rv = hmm_model_env["Gamma_rv"]
Gamma_in = hmm_model_env["Gamma_in"]
S_in = hmm_model_env["S_in"]
S_t = hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `Gamma` (i.e. a non-sequence inner-input)
scan_args_copy = copy(scan_args)
test_v = Gamma_in
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
assert Gamma_in in removed_nodes
assert Gamma_rv in removed_nodes
assert S_in in removed_nodes
assert S_t in removed_nodes
assert mus_in in scan_args_copy.outer_in_seqs
assert sigmas_in in scan_args_copy.outer_in_seqs
assert mus_t in scan_args_copy.inner_in_seqs
assert sigmas_t in scan_args_copy.inner_in_seqs
assert rng_tt in scan_args_copy.outer_in_shared
assert rng_in in scan_args_copy.inner_out_shared
assert rng_updates in scan_args.outer_out_shared
def test_ScanArgs_remove_shared_inner_output():
hmm_model_env = create_test_hmm()
scan_args = hmm_model_env["scan_args"]
hmm_model_env["scan_op"]
hmm_model_env["Y_t"]
Y_rv = hmm_model_env["Y_rv"]
mus_in = hmm_model_env["mus_in"]
mus_t = hmm_model_env["mus_t"]
sigmas_in = hmm_model_env["sigmas_in"]
sigmas_t = hmm_model_env["sigmas_t"]
hmm_model_env["Gamma_rv"]
hmm_model_env["Gamma_in"]
S_in = hmm_model_env["S_in"]
hmm_model_env["S_t"]
rng_tt = hmm_model_env["rng_tt"]
rng_in = hmm_model_env["rng_in"]
rng_updates = hmm_model_env["rng_updates"]
# Remove `rng` (i.e. a shared inner-output)
scan_args_copy = copy(scan_args)
test_v = rng_updates
rm_info = scan_args_copy.remove_from_fields(test_v, rm_dependents=True)
removed_nodes, _ = zip(*rm_info)
assert rng_tt in removed_nodes
assert rng_in in removed_nodes
assert rng_updates in removed_nodes
assert Y_rv in removed_nodes
assert S_in in removed_nodes
assert sigmas_in in scan_args_copy.outer_in_seqs
assert sigmas_t in scan_args_copy.inner_in_seqs
assert mus_in in scan_args_copy.outer_in_seqs
assert mus_t in scan_args_copy.inner_in_seqs
|
[
"aesara.scan.utils.ScanArgs.from_node",
"aesara.tensor.iscalar",
"aesara.tensor.random.normal",
"numpy.random.SeedSequence",
"pytest.fixture",
"aesara.shared",
"aesara.scan",
"copy.copy",
"numpy.random.default_rng",
"pytest.raises",
"aesara.tensor.random.categorical",
"aesara.scan.utils.ScanArgs.create_empty",
"aesara.tensor.ones",
"numpy.arange",
"aesara.tensor.stack",
"aesara.tensor.matrix",
"aesara.config.change_flags"
] |
[((145, 189), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (159, 189), False, 'import pytest\n'), ((335, 363), 'numpy.random.default_rng', 'np.random.default_rng', (['(23422)'], {}), '(23422)\n', (356, 363), True, 'import numpy as np\n'), ((377, 426), 'aesara.shared', 'aesara.shared', (['rng_state'], {'name': '"""rng"""', 'borrow': '(True)'}), "(rng_state, name='rng', borrow=True)\n", (390, 426), False, 'import aesara\n'), ((503, 518), 'aesara.tensor.iscalar', 'at.iscalar', (['"""N"""'], {}), "('N')\n", (513, 518), True, 'from aesara import tensor as at\n'), ((559, 574), 'aesara.tensor.iscalar', 'at.iscalar', (['"""M"""'], {}), "('M')\n", (569, 574), True, 'from aesara import tensor as at\n'), ((617, 633), 'aesara.tensor.matrix', 'at.matrix', (['"""mus"""'], {}), "('mus')\n", (626, 633), True, 'from aesara import tensor as at\n'), ((787, 803), 'aesara.tensor.ones', 'at.ones', (['(N_tt,)'], {}), '((N_tt,))\n', (794, 803), True, 'from aesara import tensor as at\n'), ((1010, 1064), 'aesara.tensor.random.categorical', 'at.random.categorical', (['pi_0_rv'], {'rng': 'rng_tt', 'name': '"""S_0"""'}), "(pi_0_rv, rng=rng_tt, name='S_0')\n", (1031, 1064), True, 'from aesara import tensor as at\n'), ((1324, 1504), 'aesara.scan', 'aesara.scan', ([], {'fn': 'scan_fn', 'sequences': '[mus_tt, sigmas_tt]', 'non_sequences': '[Gamma_rv, rng_tt]', 'outputs_info': "[{'initial': S_0_rv, 'taps': [-1]}, {}]", 'strict': '(True)', 'name': '"""scan_rv"""'}), "(fn=scan_fn, sequences=[mus_tt, sigmas_tt], non_sequences=[\n Gamma_rv, rng_tt], outputs_info=[{'initial': S_0_rv, 'taps': [-1]}, {}],\n strict=True, name='scan_rv')\n", (1335, 1504), False, 'import aesara\n'), ((1619, 1649), 'aesara.scan.utils.ScanArgs.from_node', 'ScanArgs.from_node', (['Y_rv.owner'], {}), '(Y_rv.owner)\n', (1637, 1649), False, 'from aesara.scan.utils import ScanArgs\n'), ((2385, 2408), 'aesara.scan.utils.ScanArgs.create_empty', 'ScanArgs.create_empty', ([], {}), '()\n', (2406, 2408), False, 'from aesara.scan.utils import ScanArgs\n'), ((4598, 4613), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (4602, 4613), False, 'from copy import copy\n'), ((4950, 4999), 'aesara.shared', 'aesara.shared', (['rng_state'], {'name': '"""rng"""', 'borrow': '(True)'}), "(rng_state, name='rng', borrow=True)\n", (4963, 4999), False, 'import aesara\n'), ((5076, 5091), 'aesara.tensor.iscalar', 'at.iscalar', (['"""N"""'], {}), "('N')\n", (5086, 5091), True, 'from aesara import tensor as at\n'), ((5132, 5147), 'aesara.tensor.iscalar', 'at.iscalar', (['"""M"""'], {}), "('M')\n", (5142, 5147), True, 'from aesara import tensor as at\n'), ((5190, 5206), 'aesara.tensor.matrix', 'at.matrix', (['"""mus"""'], {}), "('mus')\n", (5199, 5206), True, 'from aesara import tensor as at\n'), ((5360, 5376), 'aesara.tensor.ones', 'at.ones', (['(N_tt,)'], {}), '((N_tt,))\n', (5367, 5376), True, 'from aesara import tensor as at\n'), ((5583, 5637), 'aesara.tensor.random.categorical', 'at.random.categorical', (['pi_0_rv'], {'rng': 'rng_tt', 'name': '"""S_0"""'}), "(pi_0_rv, rng=rng_tt, name='S_0')\n", (5604, 5637), True, 'from aesara import tensor as at\n'), ((6590, 6620), 'aesara.scan.utils.ScanArgs.from_node', 'ScanArgs.from_node', (['Y_rv.owner'], {}), '(Y_rv.owner)\n', (6608, 6620), False, 'from aesara.scan.utils import ScanArgs\n'), ((7881, 7896), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (7885, 7896), False, 'from copy import copy\n'), ((8240, 8255), 'copy.copy', 'copy', 
(['scan_args'], {}), '(scan_args)\n', (8244, 8255), False, 'from copy import copy\n'), ((10616, 10631), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (10620, 10631), False, 'from copy import copy\n'), ((12376, 12391), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (12380, 12391), False, 'from copy import copy\n'), ((13815, 13830), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (13819, 13830), False, 'from copy import copy\n'), ((15361, 15376), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (15365, 15376), False, 'from copy import copy\n'), ((16791, 16806), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (16795, 16806), False, 'from copy import copy\n'), ((18129, 18144), 'copy.copy', 'copy', (['scan_args'], {}), '(scan_args)\n', (18133, 18144), False, 'from copy import copy\n'), ((223, 278), 'aesara.config.change_flags', 'aesara.config.change_flags', ([], {'cxx': '""""""', 'mode': '"""FAST_COMPILE"""'}), "(cxx='', mode='FAST_COMPILE')\n", (249, 278), False, 'import aesara\n'), ((869, 885), 'aesara.tensor.ones', 'at.ones', (['(M_tt,)'], {}), '((M_tt,))\n', (876, 885), True, 'from aesara import tensor as at\n'), ((947, 968), 'aesara.tensor.ones', 'at.ones', (['(M_tt, M_tt)'], {}), '((M_tt, M_tt))\n', (954, 968), True, 'from aesara import tensor as at\n'), ((1134, 1192), 'aesara.tensor.random.categorical', 'at.random.categorical', (['Gamma_t[S_tm1]'], {'rng': 'rng', 'name': '"""S_t"""'}), "(Gamma_t[S_tm1], rng=rng, name='S_t')\n", (1155, 1192), True, 'from aesara import tensor as at\n'), ((1207, 1265), 'aesara.tensor.random.normal', 'at.random.normal', (['mus_t[S_t]', 'sigma_t'], {'rng': 'rng', 'name': '"""Y_t"""'}), "(mus_t[S_t], sigma_t, rng=rng, name='Y_t')\n", (1223, 1265), True, 'from aesara import tensor as at\n'), ((2596, 2620), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2609, 2620), False, 'import pytest\n'), ((5442, 5458), 'aesara.tensor.ones', 'at.ones', (['(M_tt,)'], {}), '((M_tt,))\n', (5449, 5458), True, 'from aesara import tensor as at\n'), ((5520, 5541), 'aesara.tensor.ones', 'at.ones', (['(M_tt, M_tt)'], {}), '((M_tt, M_tt))\n', (5527, 5541), True, 'from aesara import tensor as at\n'), ((5714, 5772), 'aesara.tensor.random.categorical', 'at.random.categorical', (['Gamma_t[S_tm2]'], {'rng': 'rng', 'name': '"""S_t"""'}), "(Gamma_t[S_tm2], rng=rng, name='S_t')\n", (5735, 5772), True, 'from aesara import tensor as at\n'), ((5787, 5847), 'aesara.tensor.random.normal', 'at.random.normal', (['mus_t[S_tm1]', 'sigma_t'], {'rng': 'rng', 'name': '"""Y_t"""'}), "(mus_t[S_tm1], sigma_t, rng=rng, name='Y_t')\n", (5803, 5847), True, 'from aesara import tensor as at\n'), ((6925, 6935), 'aesara.tensor.ones', 'at.ones', (['(2)'], {}), '(2)\n', (6932, 6935), True, 'from aesara import tensor as at\n'), ((9848, 9873), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9861, 9873), False, 'import pytest\n'), ((4906, 4934), 'numpy.random.SeedSequence', 'np.random.SeedSequence', (['(1234)'], {}), '(1234)\n', (4928, 4934), True, 'import numpy as np\n'), ((2649, 2659), 'aesara.tensor.ones', 'at.ones', (['(2)'], {}), '(2)\n', (2656, 2659), True, 'from aesara import tensor as at\n'), ((681, 699), 'numpy.arange', 'np.arange', (['(0.0)', '(10)'], {}), '(0.0, 10)\n', (690, 699), True, 'import numpy as np\n'), ((701, 724), 'numpy.arange', 'np.arange', (['(0.0)', '(-10)', '(-1)'], {}), '(0.0, -10, -1)\n', (710, 724), True, 'import numpy as np\n'), ((5254, 5272), 'numpy.arange', 'np.arange', 
(['(0.0)', '(10)'], {}), '(0.0, 10)\n', (5263, 5272), True, 'import numpy as np\n'), ((5274, 5297), 'numpy.arange', 'np.arange', (['(0.0)', '(-10)', '(-1)'], {}), '(0.0, -10, -1)\n', (5283, 5297), True, 'import numpy as np\n'), ((6054, 6080), 'aesara.tensor.stack', 'at.stack', (['[S_0_rv, S_0_rv]'], {}), '([S_0_rv, S_0_rv])\n', (6062, 6080), True, 'from aesara import tensor as at\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, ScalarFormatter
from .specutils import Spectrum1D
def plot_spectrum(spec, wlmin=None, wlmax=None, ax=None,
dxmaj=None, dxmin=None, dymaj=None, dymin=None,
fillcolor="#cccccc",fillalpha=1,
**kwargs):
    """Plot a 1D spectrum with a stepped error band derived from its inverse variance;
    extra keyword arguments are passed to ax.plot()."""
    if ax is None:
fig, ax = plt.subplots()
wave = spec.dispersion
flux = spec.flux
errs = spec.ivar**-0.5
ii = np.ones(len(wave), dtype=bool)
if wlmin is not None:
ii = ii & (wave > wlmin)
if wlmax is not None:
ii = ii & (wave < wlmax)
wave = wave[ii]
flux = flux[ii]
errs = errs[ii]
y1 = flux-errs
y2 = flux+errs
fill_between_steps(ax, wave, y1, y2, alpha=fillalpha, facecolor=fillcolor, edgecolor=fillcolor)
ax.plot(wave, flux, **kwargs)
ax.xaxis.set_major_formatter(ScalarFormatter(useOffset=False))
if dxmaj is not None: ax.xaxis.set_major_locator(MultipleLocator(dxmaj))
if dxmin is not None: ax.xaxis.set_minor_locator(MultipleLocator(dxmin))
if dymaj is not None: ax.yaxis.set_major_locator(MultipleLocator(dymaj))
if dymin is not None: ax.yaxis.set_minor_locator(MultipleLocator(dymin))
return ax
def fill_between_steps(ax, x, y1, y2=0, h_align='mid', **kwargs):
"""
Fill between for step plots in matplotlib.
**kwargs will be passed to the matplotlib fill_between() function.
"""
# First, duplicate the x values
xx = x.repeat(2)[1:]
# Now: the average x binwidth
xstep = np.repeat((x[1:] - x[:-1]), 2)
xstep = np.concatenate(([xstep[0]], xstep, [xstep[-1]]))
# Now: add one step at end of row.
xx = np.append(xx, xx.max() + xstep[-1])
    # Make it possible to change step alignment.
if h_align == 'mid':
xx -= xstep / 2.
elif h_align == 'right':
xx -= xstep
# Also, duplicate each y coordinate in both arrays
y1 = y1.repeat(2)#[:-1]
    if isinstance(y2, np.ndarray):
y2 = y2.repeat(2)#[:-1]
# now to the plotting part:
return ax.fill_between(xx, y1, y2=y2, **kwargs)
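
# Illustrative usage (not part of the original module): fill_between_steps() draws a
# stepped error band that lines up with a histogram-style spectrum trace, e.g.
#
#     fig, ax = plt.subplots()
#     wave = np.linspace(4000., 5000., 200)
#     flux = np.ones_like(wave)
#     errs = np.full_like(wave, 0.05)
#     fill_between_steps(ax, wave, flux - errs, flux + errs, facecolor="#cccccc")
#     ax.plot(wave, flux, color="k")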
|
[
"numpy.repeat",
"matplotlib.ticker.MultipleLocator",
"matplotlib.pyplot.subplots",
"numpy.concatenate",
"matplotlib.ticker.ScalarFormatter"
] |
[((1580, 1608), 'numpy.repeat', 'np.repeat', (['(x[1:] - x[:-1])', '(2)'], {}), '(x[1:] - x[:-1], 2)\n', (1589, 1608), True, 'import numpy as np\n'), ((1623, 1671), 'numpy.concatenate', 'np.concatenate', (['([xstep[0]], xstep, [xstep[-1]])'], {}), '(([xstep[0]], xstep, [xstep[-1]]))\n', (1637, 1671), True, 'import numpy as np\n'), ((390, 404), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (402, 404), True, 'import matplotlib.pyplot as plt\n'), ((914, 946), 'matplotlib.ticker.ScalarFormatter', 'ScalarFormatter', ([], {'useOffset': '(False)'}), '(useOffset=False)\n', (929, 946), False, 'from matplotlib.ticker import MultipleLocator, ScalarFormatter\n'), ((1001, 1023), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['dxmaj'], {}), '(dxmaj)\n', (1016, 1023), False, 'from matplotlib.ticker import MultipleLocator, ScalarFormatter\n'), ((1078, 1100), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['dxmin'], {}), '(dxmin)\n', (1093, 1100), False, 'from matplotlib.ticker import MultipleLocator, ScalarFormatter\n'), ((1155, 1177), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['dymaj'], {}), '(dymaj)\n', (1170, 1177), False, 'from matplotlib.ticker import MultipleLocator, ScalarFormatter\n'), ((1232, 1254), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['dymin'], {}), '(dymin)\n', (1247, 1254), False, 'from matplotlib.ticker import MultipleLocator, ScalarFormatter\n')]
|
# py-motmetrics - Metrics for multiple object tracker (MOT) benchmarking.
# https://github.com/cheind/py-motmetrics/
#
# MIT License
# Copyright (c) 2017-2020 <NAME>, <NAME> and others.
# See LICENSE file for terms.
"""Accumulate tracking events frame by frame."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import itertools
import numpy as np
import pandas as pd
from motmetrics.lap import linear_sum_assignment
_INDEX_FIELDS = ['FrameId', 'Event']
_EVENT_FIELDS = ['Type', 'OId', 'HId', 'D']
class MOTAccumulator(object):
"""Manage tracking events.
This class computes per-frame tracking events from a given set of object / hypothesis
    ids and pairwise distances. Intended usage
import motmetrics as mm
acc = mm.MOTAccumulator()
acc.update(['a', 'b'], [0, 1, 2], dists, frameid=0)
...
acc.update(['d'], [6,10], other_dists, frameid=76)
summary = mm.metrics.summarize(acc)
print(mm.io.render_summary(summary))
Update is called once per frame and takes objects / hypothesis ids and a pairwise distance
matrix between those (see distances module for support). Per frame max(len(objects), len(hypothesis))
events are generated. Each event type is one of the following
    - `'MATCH'` a match between an object and hypothesis was found
    - `'SWITCH'` a match between an object and hypothesis was found but differs from previous assignment (hypothesisid != previous)
    - `'MISS'` no match for an object was found
    - `'FP'` no match for a hypothesis was found (spurious detections)
    - `'RAW'` events corresponding to raw input
    - `'TRANSFER'` a match between an object and hypothesis was found but differs from previous assignment (objectid != previous)
    - `'ASCEND'` a match between an object and hypothesis was found but differs from previous assignment (hypothesisid is new)
    - `'MIGRATE'` a match between an object and hypothesis was found but differs from previous assignment (objectid is new)
Events are tracked in a pandas Dataframe. The dataframe is hierarchically indexed by (`FrameId`, `EventId`),
where `FrameId` is either provided during the call to `update` or auto-incremented when `auto_id` is set
true during construction of MOTAccumulator. `EventId` is auto-incremented. The dataframe has the following
columns
- `Type` one of `('MATCH', 'SWITCH', 'MISS', 'FP', 'RAW')`
- `OId` object id or np.nan when `'FP'` or `'RAW'` and object is not present
- `HId` hypothesis id or np.nan when `'MISS'` or `'RAW'` and hypothesis is not present
- `D` distance or np.nan when `'FP'` or `'MISS'` or `'RAW'` and either object/hypothesis is absent
From the events and associated fields the entire tracking history can be recovered. Once the accumulator
has been populated with per-frame data use `metrics.summarize` to compute statistics. See `metrics.compute_metrics`
for a list of metrics computed.
References
----------
1. Bernardin, Keni, and <NAME>. "Evaluating multiple object tracking performance: the CLEAR MOT metrics."
EURASIP Journal on Image and Video Processing 2008.1 (2008): 1-10.
2. Milan, Anton, et al. "Mot16: A benchmark for multi-object tracking." arXiv preprint arXiv:1603.00831 (2016).
3. <NAME>, <NAME>, and <NAME>. "Learning to associate: Hybridboosted multi-target tracker for crowded scene."
Computer Vision and Pattern Recognition, 2009. CVPR 2009. IEEE Conference on. IEEE, 2009.
"""
def __init__(self, auto_id=False, max_switch_time=float('inf')):
"""Create a MOTAccumulator.
Params
------
auto_id : bool, optional
Whether or not frame indices are auto-incremented or provided upon
updating. Defaults to false. Not specifying a frame-id when this value
is true results in an error. Specifying a frame-id when this value is
false also results in an error.
max_switch_time : scalar, optional
Allows specifying an upper bound on the timespan an unobserved but
tracked object is allowed to generate track switch events. Useful if groundtruth
objects leaving the field of view keep their ID when they reappear,
but your tracker is not capable of recognizing this (resulting in
track switch events). The default is that there is no upper bound
on the timespan. In units of frame timestamps. When using auto_id
in units of count.
"""
# Parameters of the accumulator.
self.auto_id = auto_id
self.max_switch_time = max_switch_time
# Accumulator state.
self._events = None
self._indices = None
self.m = None
self.res_m = None
self.last_occurrence = None
self.last_match = None
self.hypHistory = None
self.dirty_events = None
self.cached_events_df = None
self.reset()
def reset(self):
"""Reset the accumulator to empty state."""
self._events = {field: [] for field in _EVENT_FIELDS}
self._indices = {field: [] for field in _INDEX_FIELDS}
self.m = {} # Pairings up to current timestamp
self.res_m = {} # Result pairings up to now
        self.last_occurrence = {}  # Tracks most recent occurrence of object
self.last_match = {} # Tracks most recent match of object
self.hypHistory = {}
self.dirty_events = True
self.cached_events_df = None
def _append_to_indices(self, frameid, eid):
self._indices['FrameId'].append(frameid)
self._indices['Event'].append(eid)
def _append_to_events(self, typestr, oid, hid, distance):
self._events['Type'].append(typestr)
self._events['OId'].append(oid)
self._events['HId'].append(hid)
self._events['D'].append(distance)
def update(self, oids, hids, dists, frameid=None, vf=''):
"""Updates the accumulator with frame specific objects/detections.
This method generates events based on the following algorithm [1]:
1. Try to carry forward already established tracks. If any paired object / hypothesis
from previous timestamps are still visible in the current frame, create a 'MATCH'
event between them.
2. For the remaining constellations minimize the total object / hypothesis distance
error (Kuhn-Munkres algorithm). If a correspondence made contradicts a previous
match create a 'SWITCH' else a 'MATCH' event.
3. Create 'MISS' events for all remaining unassigned objects.
4. Create 'FP' events for all remaining unassigned hypotheses.
Params
------
oids : N array
Array of object ids.
hids : M array
Array of hypothesis ids.
dists: NxM array
Distance matrix. np.nan values to signal do-not-pair constellations.
See `distances` module for support methods.
Kwargs
------
        frameid : id
Unique frame id. Optional when MOTAccumulator.auto_id is specified during
construction.
vf: file to log details
Returns
-------
frame_events : pd.DataFrame
Dataframe containing generated events
References
----------
1. Bernardin, Keni, and <NAME>. "Evaluating multiple object tracking performance: the CLEAR MOT metrics."
EURASIP Journal on Image and Video Processing 2008.1 (2008): 1-10.
"""
# pylint: disable=too-many-locals, too-many-statements
self.dirty_events = True
oids = np.asarray(oids)
oids_masked = np.zeros_like(oids, dtype=np.bool_)
hids = np.asarray(hids)
hids_masked = np.zeros_like(hids, dtype=np.bool_)
dists = np.atleast_2d(dists).astype(float).reshape(oids.shape[0], hids.shape[0]).copy()
if frameid is None:
assert self.auto_id, 'auto-id is not enabled'
if len(self._indices['FrameId']) > 0:
frameid = self._indices['FrameId'][-1] + 1
else:
frameid = 0
else:
assert not self.auto_id, 'Cannot provide frame id when auto-id is enabled'
eid = itertools.count()
# 0. Record raw events
no = len(oids)
nh = len(hids)
# Add a RAW event simply to ensure the frame is counted.
self._append_to_indices(frameid, next(eid))
self._append_to_events('RAW', np.nan, np.nan, np.nan)
# There must be at least one RAW event per object and hypothesis.
# Record all finite distances as RAW events.
valid_i, valid_j = np.where(np.isfinite(dists))
valid_dists = dists[valid_i, valid_j]
for i, j, dist_ij in zip(valid_i, valid_j, valid_dists):
self._append_to_indices(frameid, next(eid))
self._append_to_events('RAW', oids[i], hids[j], dist_ij)
# Add a RAW event for objects and hypotheses that were present but did
# not overlap with anything.
used_i = np.unique(valid_i)
used_j = np.unique(valid_j)
unused_i = np.setdiff1d(np.arange(no), used_i)
unused_j = np.setdiff1d(np.arange(nh), used_j)
for oid in oids[unused_i]:
self._append_to_indices(frameid, next(eid))
self._append_to_events('RAW', oid, np.nan, np.nan)
for hid in hids[unused_j]:
self._append_to_indices(frameid, next(eid))
self._append_to_events('RAW', np.nan, hid, np.nan)
if oids.size * hids.size > 0:
# 1. Try to re-establish tracks from previous correspondences
for i in range(oids.shape[0]):
# No need to check oids_masked[i] here.
if oids[i] not in self.m:
continue
hprev = self.m[oids[i]]
j, = np.where(~hids_masked & (hids == hprev))
if j.shape[0] == 0:
continue
j = j[0]
if np.isfinite(dists[i, j]):
o = oids[i]
h = hids[j]
oids_masked[i] = True
hids_masked[j] = True
self.m[oids[i]] = hids[j]
self._append_to_indices(frameid, next(eid))
self._append_to_events('MATCH', oids[i], hids[j], dists[i, j])
self.last_match[o] = frameid
self.hypHistory[h] = frameid
            # 2. Try to match remaining objects/hypotheses
dists[oids_masked, :] = np.nan
dists[:, hids_masked] = np.nan
rids, cids = linear_sum_assignment(dists)
for i, j in zip(rids, cids):
if not np.isfinite(dists[i, j]):
continue
o = oids[i]
h = hids[j]
is_switch = (o in self.m and
self.m[o] != h and
abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)
cat1 = 'SWITCH' if is_switch else 'MATCH'
if cat1 == 'SWITCH':
if h not in self.hypHistory:
subcat = 'ASCEND'
self._append_to_indices(frameid, next(eid))
self._append_to_events(subcat, oids[i], hids[j], dists[i, j])
# ignore the last condition temporarily
is_transfer = (h in self.res_m and
self.res_m[h] != o)
# is_transfer = (h in self.res_m and
# self.res_m[h] != o and
# abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)
cat2 = 'TRANSFER' if is_transfer else 'MATCH'
if cat2 == 'TRANSFER':
if o not in self.last_match:
subcat = 'MIGRATE'
self._append_to_indices(frameid, next(eid))
self._append_to_events(subcat, oids[i], hids[j], dists[i, j])
self._append_to_indices(frameid, next(eid))
self._append_to_events(cat2, oids[i], hids[j], dists[i, j])
if vf != '' and (cat1 != 'MATCH' or cat2 != 'MATCH'):
if cat1 == 'SWITCH':
vf.write('%s %d %d %d %d %d\n' % (subcat[:2], o, self.last_match[o], self.m[o], frameid, h))
if cat2 == 'TRANSFER':
vf.write('%s %d %d %d %d %d\n' % (subcat[:2], h, self.hypHistory[h], self.res_m[h], frameid, o))
self.hypHistory[h] = frameid
self.last_match[o] = frameid
self._append_to_indices(frameid, next(eid))
self._append_to_events(cat1, oids[i], hids[j], dists[i, j])
oids_masked[i] = True
hids_masked[j] = True
self.m[o] = h
self.res_m[h] = o
# 3. All remaining objects are missed
for o in oids[~oids_masked]:
self._append_to_indices(frameid, next(eid))
self._append_to_events('MISS', o, np.nan, np.nan)
if vf != '':
vf.write('FN %d %d\n' % (frameid, o))
# 4. All remaining hypotheses are false alarms
for h in hids[~hids_masked]:
self._append_to_indices(frameid, next(eid))
self._append_to_events('FP', np.nan, h, np.nan)
if vf != '':
vf.write('FP %d %d\n' % (frameid, h))
        # 5. Update occurrence state
for o in oids:
self.last_occurrence[o] = frameid
return frameid
@property
def events(self):
if self.dirty_events:
self.cached_events_df = MOTAccumulator.new_event_dataframe_with_data(self._indices, self._events)
self.dirty_events = False
return self.cached_events_df
@property
def mot_events(self):
df = self.events
return df[df.Type != 'RAW']
@staticmethod
def new_event_dataframe():
"""Create a new DataFrame for event tracking."""
idx = pd.MultiIndex(levels=[[], []], codes=[[], []], names=['FrameId', 'Event'])
cats = pd.Categorical([], categories=['RAW', 'FP', 'MISS', 'SWITCH', 'MATCH', 'TRANSFER', 'ASCEND', 'MIGRATE'])
df = pd.DataFrame(
OrderedDict([
('Type', pd.Series(cats)), # Type of event. One of FP (false positive), MISS, SWITCH, MATCH
('OId', pd.Series(dtype=float)), # Object ID or -1 if FP. Using float as missing values will be converted to NaN anyways.
('HId', pd.Series(dtype=float)), # Hypothesis ID or NaN if MISS. Using float as missing values will be converted to NaN anyways.
('D', pd.Series(dtype=float)), # Distance or NaN when FP or MISS
]),
index=idx
)
return df
@staticmethod
def new_event_dataframe_with_data(indices, events):
"""Create a new DataFrame filled with data.
Params
------
indices: dict
dict of lists with fields 'FrameId' and 'Event'
events: dict
dict of lists with fields 'Type', 'OId', 'HId', 'D'
"""
if len(events) == 0:
return MOTAccumulator.new_event_dataframe()
raw_type = pd.Categorical(
events['Type'],
categories=['RAW', 'FP', 'MISS', 'SWITCH', 'MATCH', 'TRANSFER', 'ASCEND', 'MIGRATE'],
ordered=False)
series = [
pd.Series(raw_type, name='Type'),
pd.Series(events['OId'], dtype=float, name='OId'),
pd.Series(events['HId'], dtype=float, name='HId'),
pd.Series(events['D'], dtype=float, name='D')
]
idx = pd.MultiIndex.from_arrays(
[indices[field] for field in _INDEX_FIELDS],
names=_INDEX_FIELDS)
df = pd.concat(series, axis=1)
df.index = idx
return df
@staticmethod
def merge_analysis(anas, infomap):
# pylint: disable=missing-function-docstring
res = {'hyp': {}, 'obj': {}}
mapp = {'hyp': 'hid_map', 'obj': 'oid_map'}
for ana, infom in zip(anas, infomap):
if ana is None:
return None
for t in ana.keys():
which = mapp[t]
if np.nan in infom[which]:
res[t][int(infom[which][np.nan])] = 0
if 'nan' in infom[which]:
res[t][int(infom[which]['nan'])] = 0
for _id, cnt in ana[t].items():
if _id not in infom[which]:
_id = str(_id)
res[t][int(infom[which][_id])] = cnt
return res
@staticmethod
def merge_event_dataframes(dfs, update_frame_indices=True, update_oids=True, update_hids=True, return_mappings=False):
"""Merge dataframes.
Params
------
dfs : list of pandas.DataFrame or MotAccumulator
A list of event containers to merge
Kwargs
------
update_frame_indices : boolean, optional
Ensure that frame indices are unique in the merged container
        update_oids : boolean, optional
            Ensure that object ids are unique in the merged container
        update_hids : boolean, optional
            Ensure that hypothesis ids are unique in the merged container
        return_mappings : boolean, optional
            Whether or not to return mapping information
Returns
-------
df : pandas.DataFrame
Merged event data frame
"""
mapping_infos = []
new_oid = itertools.count()
new_hid = itertools.count()
r = MOTAccumulator.new_event_dataframe()
for df in dfs:
if isinstance(df, MOTAccumulator):
df = df.events
copy = df.copy()
infos = {}
# Update index
if update_frame_indices:
# pylint: disable=cell-var-from-loop
next_frame_id = max(r.index.get_level_values(0).max() + 1, r.index.get_level_values(0).unique().shape[0])
if np.isnan(next_frame_id):
next_frame_id = 0
copy.index = copy.index.map(lambda x: (x[0] + next_frame_id, x[1]))
infos['frame_offset'] = next_frame_id
# Update object / hypothesis ids
if update_oids:
# pylint: disable=cell-var-from-loop
oid_map = dict([oid, str(next(new_oid))] for oid in copy['OId'].dropna().unique())
copy['OId'] = copy['OId'].map(lambda x: oid_map[x], na_action='ignore')
infos['oid_map'] = oid_map
if update_hids:
# pylint: disable=cell-var-from-loop
hid_map = dict([hid, str(next(new_hid))] for hid in copy['HId'].dropna().unique())
copy['HId'] = copy['HId'].map(lambda x: hid_map[x], na_action='ignore')
infos['hid_map'] = hid_map
r = r.append(copy)
mapping_infos.append(infos)
if return_mappings:
return r, mapping_infos
else:
return r
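

if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module): two
    # ground-truth objects 'a' and 'b', up to two hypotheses, and hand-written
    # distances per frame; np.nan marks pairs that must not be matched.
    acc = MOTAccumulator(auto_id=True)
    acc.update(['a', 'b'], [1, 2], [[0.1, np.nan], [0.5, 0.2]])
    acc.update(['a', 'b'], [1], [[0.2], [0.4]])
    print(acc.mot_events)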
|
[
"numpy.zeros_like",
"pandas.MultiIndex.from_arrays",
"numpy.asarray",
"motmetrics.lap.linear_sum_assignment",
"numpy.isfinite",
"itertools.count",
"numpy.isnan",
"numpy.where",
"numpy.arange",
"pandas.Series",
"pandas.Categorical",
"pandas.MultiIndex",
"pandas.concat",
"numpy.unique",
"numpy.atleast_2d"
] |
[((7820, 7836), 'numpy.asarray', 'np.asarray', (['oids'], {}), '(oids)\n', (7830, 7836), True, 'import numpy as np\n'), ((7859, 7894), 'numpy.zeros_like', 'np.zeros_like', (['oids'], {'dtype': 'np.bool_'}), '(oids, dtype=np.bool_)\n', (7872, 7894), True, 'import numpy as np\n'), ((7910, 7926), 'numpy.asarray', 'np.asarray', (['hids'], {}), '(hids)\n', (7920, 7926), True, 'import numpy as np\n'), ((7949, 7984), 'numpy.zeros_like', 'np.zeros_like', (['hids'], {'dtype': 'np.bool_'}), '(hids, dtype=np.bool_)\n', (7962, 7984), True, 'import numpy as np\n'), ((8439, 8456), 'itertools.count', 'itertools.count', ([], {}), '()\n', (8454, 8456), False, 'import itertools\n'), ((9269, 9287), 'numpy.unique', 'np.unique', (['valid_i'], {}), '(valid_i)\n', (9278, 9287), True, 'import numpy as np\n'), ((9305, 9323), 'numpy.unique', 'np.unique', (['valid_j'], {}), '(valid_j)\n', (9314, 9323), True, 'import numpy as np\n'), ((14389, 14463), 'pandas.MultiIndex', 'pd.MultiIndex', ([], {'levels': '[[], []]', 'codes': '[[], []]', 'names': "['FrameId', 'Event']"}), "(levels=[[], []], codes=[[], []], names=['FrameId', 'Event'])\n", (14402, 14463), True, 'import pandas as pd\n'), ((14479, 14587), 'pandas.Categorical', 'pd.Categorical', (['[]'], {'categories': "['RAW', 'FP', 'MISS', 'SWITCH', 'MATCH', 'TRANSFER', 'ASCEND', 'MIGRATE']"}), "([], categories=['RAW', 'FP', 'MISS', 'SWITCH', 'MATCH',\n 'TRANSFER', 'ASCEND', 'MIGRATE'])\n", (14493, 14587), True, 'import pandas as pd\n'), ((15642, 15777), 'pandas.Categorical', 'pd.Categorical', (["events['Type']"], {'categories': "['RAW', 'FP', 'MISS', 'SWITCH', 'MATCH', 'TRANSFER', 'ASCEND', 'MIGRATE']", 'ordered': '(False)'}), "(events['Type'], categories=['RAW', 'FP', 'MISS', 'SWITCH',\n 'MATCH', 'TRANSFER', 'ASCEND', 'MIGRATE'], ordered=False)\n", (15656, 15777), True, 'import pandas as pd\n'), ((16085, 16180), 'pandas.MultiIndex.from_arrays', 'pd.MultiIndex.from_arrays', (['[indices[field] for field in _INDEX_FIELDS]'], {'names': '_INDEX_FIELDS'}), '([indices[field] for field in _INDEX_FIELDS],\n names=_INDEX_FIELDS)\n', (16110, 16180), True, 'import pandas as pd\n'), ((16215, 16240), 'pandas.concat', 'pd.concat', (['series'], {'axis': '(1)'}), '(series, axis=1)\n', (16224, 16240), True, 'import pandas as pd\n'), ((17996, 18013), 'itertools.count', 'itertools.count', ([], {}), '()\n', (18011, 18013), False, 'import itertools\n'), ((18032, 18049), 'itertools.count', 'itertools.count', ([], {}), '()\n', (18047, 18049), False, 'import itertools\n'), ((8880, 8898), 'numpy.isfinite', 'np.isfinite', (['dists'], {}), '(dists)\n', (8891, 8898), True, 'import numpy as np\n'), ((9356, 9369), 'numpy.arange', 'np.arange', (['no'], {}), '(no)\n', (9365, 9369), True, 'import numpy as np\n'), ((9411, 9424), 'numpy.arange', 'np.arange', (['nh'], {}), '(nh)\n', (9420, 9424), True, 'import numpy as np\n'), ((10870, 10898), 'motmetrics.lap.linear_sum_assignment', 'linear_sum_assignment', (['dists'], {}), '(dists)\n', (10891, 10898), False, 'from motmetrics.lap import linear_sum_assignment\n'), ((15842, 15874), 'pandas.Series', 'pd.Series', (['raw_type'], {'name': '"""Type"""'}), "(raw_type, name='Type')\n", (15851, 15874), True, 'import pandas as pd\n'), ((15888, 15937), 'pandas.Series', 'pd.Series', (["events['OId']"], {'dtype': 'float', 'name': '"""OId"""'}), "(events['OId'], dtype=float, name='OId')\n", (15897, 15937), True, 'import pandas as pd\n'), ((15951, 16000), 'pandas.Series', 'pd.Series', (["events['HId']"], {'dtype': 'float', 'name': '"""HId"""'}), "(events['HId'], 
dtype=float, name='HId')\n", (15960, 16000), True, 'import pandas as pd\n'), ((16014, 16059), 'pandas.Series', 'pd.Series', (["events['D']"], {'dtype': 'float', 'name': '"""D"""'}), "(events['D'], dtype=float, name='D')\n", (16023, 16059), True, 'import pandas as pd\n'), ((10087, 10127), 'numpy.where', 'np.where', (['(~hids_masked & (hids == hprev))'], {}), '(~hids_masked & (hids == hprev))\n', (10095, 10127), True, 'import numpy as np\n'), ((10238, 10262), 'numpy.isfinite', 'np.isfinite', (['dists[i, j]'], {}), '(dists[i, j])\n', (10249, 10262), True, 'import numpy as np\n'), ((18514, 18537), 'numpy.isnan', 'np.isnan', (['next_frame_id'], {}), '(next_frame_id)\n', (18522, 18537), True, 'import numpy as np\n'), ((10964, 10988), 'numpy.isfinite', 'np.isfinite', (['dists[i, j]'], {}), '(dists[i, j])\n', (10975, 10988), True, 'import numpy as np\n'), ((14662, 14677), 'pandas.Series', 'pd.Series', (['cats'], {}), '(cats)\n', (14671, 14677), True, 'import pandas as pd\n'), ((14778, 14800), 'pandas.Series', 'pd.Series', ([], {'dtype': 'float'}), '(dtype=float)\n', (14787, 14800), True, 'import pandas as pd\n'), ((14921, 14943), 'pandas.Series', 'pd.Series', ([], {'dtype': 'float'}), '(dtype=float)\n', (14930, 14943), True, 'import pandas as pd\n'), ((15069, 15091), 'pandas.Series', 'pd.Series', ([], {'dtype': 'float'}), '(dtype=float)\n', (15078, 15091), True, 'import pandas as pd\n'), ((8001, 8021), 'numpy.atleast_2d', 'np.atleast_2d', (['dists'], {}), '(dists)\n', (8014, 8021), True, 'import numpy as np\n')]
|
from os import system
import threading
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime as dt
import time
def extract_data(pid):
system("top -H -p "+pid+" -d 0.5 -b >> migrations.txt")
def get_hops(tid):
    """Parse migrations.txt (output of `top -H`) and return how many distinct cores thread `tid` ran on."""
    top_thread = open("migrations.txt")
thread_data = top_thread.readlines()
thread_data = [i.strip() for i in thread_data]
thread_data = [i for i in thread_data if len(i) > 0]
thread_data = [i for i in thread_data if i.split()[0]!="PID"]
time = [i.split()[3] for i in thread_data]
cores = [int(i.split()[-1]) for i in thread_data]
ids = [int(i.split()[0]) for i in thread_data]
df = {"cores":pd.Series(np.array(cores)),"time":pd.Series(np.array(time)),"ids":pd.Series(np.array(ids))}
df = pd.DataFrame(df)
freq_change = {}
start = df['time'][0]
for i in range(1,len(df)):
if df['cores'][i-1]!=df['cores'][i]:
freq_change[(start,df['time'][i])] = df['cores'][i-1]
start = df['time'][i]
return len(set(df[df["ids"]==int(tid)]["cores"].values))
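
# Illustrative helper (an assumption, not used by the original script): the "hops"
# value computed by get_hops() above is simply the number of distinct cores a thread
# was observed on across the sampled `top` output.
def count_distinct_cores(cores):
    """Return how many different CPU core ids appear in a list of per-sample cores."""
    return len(set(cores))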
def hops_per_tid(pid):
system("ps -To pid,tid,pcpu,tty,time,comm -p "+pid+" > thread.txt")
data = open("thread.txt")
data = data.readlines()
data = [i.strip() for i in data]
cols = data[0].split()
data = data[1:]
data[0].split()
tids = [i.split()[1] for i in data]
pcpu = [i.split()[2] for i in data]
comm = [" ".join(i.split()[5:]) for i in data]
print(tids)
print(pcpu)
print(comm)
hops = []
for tid in tids:
hops.append(get_hops(tid))
print("HOPS:\n",hops)
df = {"TID":pd.Series(np.array(tids)), "PCPU":pd.Series(np.array(pcpu)), "COMMAND":pd.Series(np.array(comm)), "HOPS":pd.Series(np.array(hops))}
df = pd.DataFrame(df)
df.sort_values(by=['HOPS'], inplace=True)
print(df)
if __name__ == '__main__':
pid = input("Enter Process ID: ")
# refreshRate = input("Enter Refresh Rate: ")
run_top = threading.Thread(target=extract_data, args = (pid,))
hop_per_tid = threading.Thread(target=hops_per_tid, args = (pid,))
run_top.start()
time.sleep(2)
hop_per_tid.start()
run_top.join()
hop_per_tid.join()
print("DONE")
|
[
"pandas.DataFrame",
"threading.Thread",
"os.system",
"time.sleep",
"numpy.array"
] |
[((188, 247), 'os.system', 'system', (["('top -H -p ' + pid + ' -d 0.5 -b >> migrations.txt')"], {}), "('top -H -p ' + pid + ' -d 0.5 -b >> migrations.txt')\n", (194, 247), False, 'from os import system\n'), ((798, 814), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {}), '(df)\n', (810, 814), True, 'import pandas as pd\n'), ((1129, 1200), 'os.system', 'system', (["('ps -To pid,tid,pcpu,tty,time,comm -p ' + pid + ' > thread.txt')"], {}), "('ps -To pid,tid,pcpu,tty,time,comm -p ' + pid + ' > thread.txt')\n", (1135, 1200), False, 'from os import system\n'), ((1797, 1813), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {}), '(df)\n', (1809, 1813), True, 'import pandas as pd\n'), ((2009, 2059), 'threading.Thread', 'threading.Thread', ([], {'target': 'extract_data', 'args': '(pid,)'}), '(target=extract_data, args=(pid,))\n', (2025, 2059), False, 'import threading\n'), ((2081, 2131), 'threading.Thread', 'threading.Thread', ([], {'target': 'hops_per_tid', 'args': '(pid,)'}), '(target=hops_per_tid, args=(pid,))\n', (2097, 2131), False, 'import threading\n'), ((2159, 2172), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2169, 2172), False, 'import time\n'), ((704, 719), 'numpy.array', 'np.array', (['cores'], {}), '(cores)\n', (712, 719), True, 'import numpy as np\n'), ((738, 752), 'numpy.array', 'np.array', (['time'], {}), '(time)\n', (746, 752), True, 'import numpy as np\n'), ((770, 783), 'numpy.array', 'np.array', (['ids'], {}), '(ids)\n', (778, 783), True, 'import numpy as np\n'), ((1666, 1680), 'numpy.array', 'np.array', (['tids'], {}), '(tids)\n', (1674, 1680), True, 'import numpy as np\n'), ((1700, 1714), 'numpy.array', 'np.array', (['pcpu'], {}), '(pcpu)\n', (1708, 1714), True, 'import numpy as np\n'), ((1737, 1751), 'numpy.array', 'np.array', (['comm'], {}), '(comm)\n', (1745, 1751), True, 'import numpy as np\n'), ((1771, 1785), 'numpy.array', 'np.array', (['hops'], {}), '(hops)\n', (1779, 1785), True, 'import numpy as np\n')]
|
import os
import io
import time
import re
import logging
import multiprocessing as mp
from queue import Empty
from PIL import Image
from http import server
import socketserver
import numpy as np
import cv2
import urllib.request
Ncams = 1
streamsVideo = ('http://192.168.0.20:8000', 'http://192.168.0.20:8000',
'http://192.168.0.20:8000','http://192.168.0.20:8000', 'rtsp://admin:[email protected]/mpeg4')
streamsRects = ('http://192.168.0.20:8000/data.html', 'http://192.168.0.20:8000/data.html',
'http://192.168.0.20:8000/data.html', 'http://192.168.0.20:8000/data.html')
class objRect:
def __init__(self, rect=None, side=None):
if rect:
self.x0 = rect[0]
self.x1 = rect[1]
self.y0 = rect[2]
self.y1 = rect[3]
else:
self.x0 = 0
self.x1 = 0
self.y0 = 0
self.y1 = 0
if side:
self.side = side
else:
self.side = 0 #0 = right cam, 1 = front cam, 2 = left cam, 3 = back cam
def area(self):
return (abs(self.x1-self.x0)*abs(self.y1-self.y0))
def rect(self):
return (self.x0, self.x1, self.y0, self.y1)
def center(self):
return (self.x1-self.x0, self.y1-self.y0)
def height(self):
return abs(self.y1-self.y0)
def width(self):
return abs(self.x1-self.x0)
def setrect(self, rect):
self.x0 = rect[0]
self.x1 = rect[1]
self.y0 = rect[2]
self.y1 = rect[3]
def copy(self):
return objRect(self.rect(), self.side)
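
# Illustrative usage of objRect (not part of the original script): rectangles are
# stored as (x0, x1, y0, y1) plus an optional camera side index, e.g.
#
#     r = objRect((10, 110, 20, 70), side=1)   # 100x50 box seen by the front cam
#     r.area()     # -> 5000
#     r.center()   # -> (100, 50), i.e. width/height deltas rather than a midpoint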
PAGE="""\
<html>
<head>
<title>Jetson TX2 image proccessing output</title>
</head>
<body>
<center><h1>Joined image</h1></center>
<center><img src="stream.mjpg" width="1740" height="740" /></center>
</body>
</html>
"""
class StreamingHandler(server.BaseHTTPRequestHandler):
def do_GET(self):
global cap
if self.path == '/':
self.send_response(301)
self.send_header('Location', '/index.html')
self.end_headers()
elif self.path == '/index.html':
stri = PAGE
content = stri.encode('utf-8')
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(content))
self.end_headers()
self.wfile.write(content)
elif self.path == '/stream.mjpg':
self.send_response(200)
self.send_header('Age', 0)
self.send_header('Cache-Control', 'no-cache, private')
self.send_header('Pragma', 'no-cache')
self.send_header('Content-Type', 'multipart/x-mixed-replace; boundary=FRAME')
self.end_headers()
try:
while True:
if not self.server.Queue.empty():
frame = self.server.Queue.get(False)
                        ret, buf = cv2.imencode('.jpg', frame)
                        frame = np.array(buf).tobytes()  # tostring() is deprecated in NumPy
                        self.wfile.write(b'--FRAME\r\n')
                        self.send_header('Content-Type', 'image/jpeg')
                        self.send_header('Content-Length', len(frame))
                        self.end_headers()
                        self.wfile.write(frame)
                        self.wfile.write(b'\r\n')  # CRLF terminates each multipart frame
except Exception as e:
logging.warning('Removed streaming client %s: %s', self.client_address, str(e))
else:
self.send_error(404)
self.end_headers()
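
# Note on '/stream.mjpg' above (descriptive comment, not part of the original script):
# it implements MJPEG-over-HTTP by sending a 'multipart/x-mixed-replace' response in
# which every part is a complete JPEG frame separated by the '--FRAME' boundary, so
# the browser keeps replacing the displayed image as new frames arrive.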
class StreamingServer(socketserver.ThreadingMixIn, server.HTTPServer):
allow_reuse_address = True
daemon_threads = True
def cam_reader(cam, queueoutVideo, queueinRect, stop):
    """Read frames from camera `cam`, overlay detection boxes received on queueinRect, and push annotated frames to queueoutVideo."""
    cap = cv2.VideoCapture(streamsVideo[cam])
objdata = []
no_detect_cnt = 0
while cap:
if stop.is_set():
cap.release()
break
ret, frame = cap.read()
        if not ret:
            continue
# print(frame.shape)
if no_detect_cnt >= 25:
objdata = []
else:
no_detect_cnt += 1
if not queueinRect.empty():
no_detect_cnt = 0
objdata = queueinRect.get(False)
for obj in objdata:
[x0, y0, x1, y1] = obj['objcoord']
x0 = int(x0*frame.shape[1])
x1 = int(x1*frame.shape[1])
y0 = int(y0*frame.shape[0])
y1 = int(y1*frame.shape[0])
frame = cv2.rectangle(frame, (x0,y0),(x1,y1), color=(0,255,0), thickness=3)
frame = cv2.putText(frame, 'ID = {0:d}'.format(obj['objtype']), (x0+6,y1-6), cv2.FONT_HERSHEY_DUPLEX, 0.8, (255,0,0), 2)
if not queueoutVideo.full():
queueoutVideo.put((cam, frame))
def main_cam_reader(queueoutVideo, queueinRect, stop):
cap = cv2.VideoCapture(streamsVideo[-1])
objdata = []
no_detect_cnt = 0
while cap:
if stop.is_set():
cap.release()
break
ret, frame = cap.read()
        if not ret:
            continue
# print(frame.shape)
frame = cv2.resize(frame, (864, 486))
if no_detect_cnt >= 25:
objdata = []
else:
no_detect_cnt += 1
if not queueinRect.empty():
no_detect_cnt = 0
objdata = queueinRect.get(False)
for obj in objdata:
[x0, y0, x1, y1] = obj['objcoord']
x0 = int(x0*frame.shape[1])
x1 = int(x1*frame.shape[1])
y0 = int(y0*frame.shape[0])
y1 = int(y1*frame.shape[0])
frame = cv2.rectangle(frame, (x0,y0),(x1,y1), color=(0,255,0), thickness=3)
frame = cv2.putText(frame, 'ID = {0:d}'.format(obj['objtype']), (x0+6,y1-6), cv2.FONT_HERSHEY_DUPLEX, 0.8, (255,0,0), 2)
if not queueoutVideo.full():
queueoutVideo.put((4, frame))
def RecognRect(cam, queueout, objsRectqueue, stop):
dataresp = ''
addr = streamsRects[cam]
while not stop.is_set():
try:
response = urllib.request.urlopen(addr)
dataresp += response.read().decode('utf-8')
a = dataresp.find('ffffd9')
b = dataresp.find('ffaaee')
if a != -1 and b != -1:
if b > (a+6):
                    block = dataresp[a+6:b]
                    strlist = block.split('\n')
objdata = []
objrects = []
#obj = {'objcoord':[0,0,0,0], 'objtype':0}
strr=''
for i in range(len(strlist)-1):
stri = strlist[i]
temp = re.findall(r'\d+', stri)
objtype = int(temp[-1])
temp = re.findall(r'\d+\.\d*', stri)
                        objcoord = list(map(float, temp))
objdata.append({'objcoord':objcoord, 'objtype':objtype})
objrects.append(objRect(objcoord, cam))
if objrects and not objsRectqueue.full():
objsRectqueue.put(objrects)
if objdata and queueout.empty():
queueout.put(objdata)
dataresp = dataresp[b+6:]
except:
pass
time.sleep(0.2)
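
# Note on RecognRect above (descriptive comment, not part of the original script): the
# detection page is polled over HTTP and the text appears to be framed by the hex
# markers 'ffffd9' and 'ffaaee'; each line in between carries an integer object type
# and four normalised box coordinates, which re.findall extracts before the rectangles
# are pushed to the per-camera and global queues.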
def concat_frames(queueinVideo, queueout, stop):
#logoImg = cv2.imread('time_replacer.png')
frame_width = (420, 420, 420, 420, 864)
frame_height = (234, 234, 234, 234, 486)
HorGap = 20
VerGap = 20
big_frame = np.zeros((VerGap+frame_height[0]+frame_height[-1], 3*HorGap+4*frame_width[0], 3), np.uint8)
big_frame[:] = (39, 27, 23)
frame_coord_x = (0, frame_width[0]+HorGap, (frame_width[0]+HorGap)*2, (frame_width[0]+HorGap)*3, 0)
frame_coord_y = (0, 0, 0, 0, frame_height[0] + VerGap)
gs_pipeline = 'appsrc ! videoconvert ! omxh264enc control-rate=2 bitrate=1000000 ! ' \
'video/x-h264, stream-format=(string)byte-stream ! h264parse ! ' \
'rtph264pay mtu=1400 ! udpsink host=192.168.0.16 port=8001 sync=false async=false'
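    # Pipeline breakdown (descriptive comment, not part of the original script): appsrc
    # receives raw BGR frames from OpenCV, videoconvert adapts the pixel format,
    # omxh264enc is the Jetson hardware H.264 encoder (control-rate=2 -> constant
    # bitrate, ~1 Mbit/s), h264parse/rtph264pay packetise the stream into RTP, and
    # udpsink sends it to 192.168.0.16:8001.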
vidstreader = cv2.VideoWriter(gs_pipeline, 0, 15/1, (big_frame.shape[1],big_frame.shape[0]), True)
print(vidstreader)
while not stop.is_set():
if not queueinVideo.empty():
(cam, frame) = queueinVideo.get(False)
#big_frame[0:234, cam*420:(cam+1)*420, :] = frame
big_frame[frame_coord_y[cam]:frame_coord_y[cam]+frame_height[cam], frame_coord_x[cam]:frame_coord_x[cam]+frame_width[cam]] = frame
vidstreader.write(big_frame)
#print(big_frame.shape)
#if queueout.empty():
# queueout.put(big_frame)
vidstreader.release()
def server_start(port, queue, stop):
try:
address = ('', port)
server = StreamingServer(address, StreamingHandler)
        server.Queue = queue  # use the queue passed as argument rather than the global
print('Server is running...')
server.serve_forever()
except (KeyboardInterrupt, SystemExit):
stop.set()
if __name__ == '__main__':
queueServer = mp.Queue(1)
queueFrames = mp.Queue(5)
queueGlobRecognRects = mp.Queue(10)
StopFlag = mp.Event()
queueRects = []
procsDetectRects = []
procsCamStream = []
for cam in range(Ncams):
queueRects.append(mp.Queue(1))
procsDetectRects.append(mp.Process(target=RecognRect, args=(cam, queueRects[cam], queueGlobRecognRects, StopFlag)))
procsCamStream.append(mp.Process(target=cam_reader, args=(cam, queueFrames, queueRects[cam], StopFlag)))
queueRects.append(mp.Queue(1))
procMainCamStream = mp.Process(target=main_cam_reader, args=(queueFrames, queueRects[-1], StopFlag))
ConcatProc = mp.Process(target=concat_frames, args=(queueFrames, queueServer, StopFlag))
ServerProc = mp.Process(target=server_start, args=(8000, queueServer, StopFlag))
st = time.time()
ConcatProc.start()
ServerProc.start()
for cam in range(Ncams):
procsCamStream[cam].start()
procsDetectRects[cam].start()
procMainCamStream.start()
while True:
if StopFlag.is_set():
StopFlag.set()
time.sleep(0.1)
for cam in range(Ncams):
procsCamStream[cam].terminate()
procsDetectRects[cam].terminate()
procMainCamStream.terminate()
ConcatProc.terminate()
ServerProc.terminate()
break
time.sleep(1)
exit(0)
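
# Overall flow (descriptive comment, not part of the original script): one process per
# camera (cam_reader / main_cam_reader) reads frames and overlays the boxes produced by
# its RecognRect process; concat_frames tiles the annotated frames into a single mosaic
# and streams it over RTP via GStreamer, while StreamingServer on port 8000 can serve
# the same mosaic as an MJPEG page (the hand-off to its queue is currently commented
# out in concat_frames).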
|
[
"numpy.zeros",
"http.server.serve_forever",
"time.time",
"cv2.VideoCapture",
"time.sleep",
"cv2.rectangle",
"cv2.imencode",
"numpy.array",
"multiprocessing.Queue",
"cv2.VideoWriter",
"multiprocessing.Event",
"multiprocessing.Process",
"cv2.resize"
] |
[((3825, 3860), 'cv2.VideoCapture', 'cv2.VideoCapture', (['streamsVideo[cam]'], {}), '(streamsVideo[cam])\n', (3841, 3860), False, 'import cv2\n'), ((4935, 4969), 'cv2.VideoCapture', 'cv2.VideoCapture', (['streamsVideo[-1]'], {}), '(streamsVideo[-1])\n', (4951, 4969), False, 'import cv2\n'), ((7667, 7772), 'numpy.zeros', 'np.zeros', (['(VerGap + frame_height[0] + frame_height[-1], 3 * HorGap + 4 * frame_width[\n 0], 3)', 'np.uint8'], {}), '((VerGap + frame_height[0] + frame_height[-1], 3 * HorGap + 4 *\n frame_width[0], 3), np.uint8)\n', (7675, 7772), True, 'import numpy as np\n'), ((8253, 8345), 'cv2.VideoWriter', 'cv2.VideoWriter', (['gs_pipeline', '(0)', '(15 / 1)', '(big_frame.shape[1], big_frame.shape[0])', '(True)'], {}), '(gs_pipeline, 0, 15 / 1, (big_frame.shape[1], big_frame.\n shape[0]), True)\n', (8268, 8345), False, 'import cv2\n'), ((9211, 9222), 'multiprocessing.Queue', 'mp.Queue', (['(1)'], {}), '(1)\n', (9219, 9222), True, 'import multiprocessing as mp\n'), ((9241, 9252), 'multiprocessing.Queue', 'mp.Queue', (['(5)'], {}), '(5)\n', (9249, 9252), True, 'import multiprocessing as mp\n'), ((9280, 9292), 'multiprocessing.Queue', 'mp.Queue', (['(10)'], {}), '(10)\n', (9288, 9292), True, 'import multiprocessing as mp\n'), ((9309, 9319), 'multiprocessing.Event', 'mp.Event', ([], {}), '()\n', (9317, 9319), True, 'import multiprocessing as mp\n'), ((9755, 9840), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'main_cam_reader', 'args': '(queueFrames, queueRects[-1], StopFlag)'}), '(target=main_cam_reader, args=(queueFrames, queueRects[-1], StopFlag)\n )\n', (9765, 9840), True, 'import multiprocessing as mp\n'), ((9854, 9929), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'concat_frames', 'args': '(queueFrames, queueServer, StopFlag)'}), '(target=concat_frames, args=(queueFrames, queueServer, StopFlag))\n', (9864, 9929), True, 'import multiprocessing as mp\n'), ((9947, 10014), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'server_start', 'args': '(8000, queueServer, StopFlag)'}), '(target=server_start, args=(8000, queueServer, StopFlag))\n', (9957, 10014), True, 'import multiprocessing as mp\n'), ((10024, 10035), 'time.time', 'time.time', ([], {}), '()\n', (10033, 10035), False, 'import time\n'), ((5208, 5237), 'cv2.resize', 'cv2.resize', (['frame', '(864, 486)'], {}), '(frame, (864, 486))\n', (5218, 5237), False, 'import cv2\n'), ((7417, 7432), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (7427, 7432), False, 'import time\n'), ((9079, 9101), 'http.server.serve_forever', 'server.serve_forever', ([], {}), '()\n', (9099, 9101), False, 'from http import server\n'), ((9718, 9729), 'multiprocessing.Queue', 'mp.Queue', (['(1)'], {}), '(1)\n', (9726, 9729), True, 'import multiprocessing as mp\n'), ((10595, 10608), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10605, 10608), False, 'import time\n'), ((9446, 9457), 'multiprocessing.Queue', 'mp.Queue', (['(1)'], {}), '(1)\n', (9454, 9457), True, 'import multiprocessing as mp\n'), ((9491, 9585), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'RecognRect', 'args': '(cam, queueRects[cam], queueGlobRecognRects, StopFlag)'}), '(target=RecognRect, args=(cam, queueRects[cam],\n queueGlobRecognRects, StopFlag))\n', (9501, 9585), True, 'import multiprocessing as mp\n'), ((9613, 9698), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'cam_reader', 'args': '(cam, queueFrames, queueRects[cam], StopFlag)'}), '(target=cam_reader, args=(cam, queueFrames, queueRects[cam],\n StopFlag))\n', 
(9623, 9698), True, 'import multiprocessing as mp\n'), ((10306, 10321), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (10316, 10321), False, 'import time\n'), ((4583, 4655), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x0, y0)', '(x1, y1)'], {'color': '(0, 255, 0)', 'thickness': '(3)'}), '(frame, (x0, y0), (x1, y1), color=(0, 255, 0), thickness=3)\n', (4596, 4655), False, 'import cv2\n'), ((5738, 5810), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x0, y0)', '(x1, y1)'], {'color': '(0, 255, 0)', 'thickness': '(3)'}), '(frame, (x0, y0), (x1, y1), color=(0, 255, 0), thickness=3)\n', (5751, 5810), False, 'import cv2\n'), ((2968, 2995), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (2980, 2995), False, 'import cv2\n'), ((3032, 3045), 'numpy.array', 'np.array', (['buf'], {}), '(buf)\n', (3040, 3045), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# @Author: <NAME>
# @Email: <EMAIL>
# @Date: 2019-06-27 13:59:02
# @Last Modified by: <NAME>
# @Last Modified time: 2020-08-08 16:23:59
import os
from sys import getsizeof
import json
import pickle
import re
import numpy as np
from scipy.interpolate import interp1d
from ..utils import isWithin, isIterable, moveItem
class Lookup:
    ''' Multidimensional lookup object used to store, project,
interpolate and retrieve several lookup tables along multiple
reference input vectors.
'''
interp_choices = ('linear', 'quadratic', 'cubic', 'poly1', 'poly2', 'poly3')
def __init__(self, refs, tables, interp_method='linear', extrapolate=False):
''' Constructor.
:param refs: dictionary of reference one-dimensional input vectors.
:param tables: dictionary of multi-dimensional lookup tables
:param interp_method: interpolation method
:param extrapolate: boolean stating whether tables can be extrapolated outside
of reference bounds
'''
self.refs = refs
self.tables = tables
self.interp_method = interp_method
self.extrapolate = extrapolate
for k, v in self.items():
if v.shape != self.dims:
raise ValueError(
f'{k} Table dimensions {v.shape} does not match references {self.dims}')
# If no dimension, make sure tables contain scalars
if self.ndims == 0 and isinstance(self.tables[self.outputs[0]], np.ndarray):
self.tables = {k: v.item(0) for k, v in self.items()}
# If single input, mark it as sole ref
if self.ndims == 1:
self.refkey = self.inputs[0]
self.ref = self.refs[self.refkey]
def __repr__(self):
ref_str = ', '.join([f'{x[0]}: {x[1]}' for x in zip(self.inputs, self.dims)])
tables_str = ', '.join(self.outputs)
return f'{self.__class__.__name__}{self.ndims}D({ref_str})[{tables_str}]'
def __getitem__(self, key):
''' simplified lookup table getter. '''
return self.tables[key]
def __delitem__(self, key):
''' simplified lookup table suppressor. '''
del self.tables[key]
def __setitem__(self, key, value):
''' simplified lookup table setter. '''
self.tables[key] = value
def __sizeof__(self):
''' Return the size of the lookup in bytes. '''
s = getsizeof(self.refs) + getsizeof(self.tables)
for k, v in self.refitems():
s += v.nbytes
for k, v in self.items():
s += v.nbytes
return s
def keys(self):
return self.tables.keys()
def values(self):
return self.tables.values()
def items(self):
return self.tables.items()
def refitems(self):
return self.refs.items()
def pop(self, key):
x = self.tables[key]
del self.tables[key]
return x
def rename(self, key1, key2):
self.tables[key2] = self.tables.pop(key1)
@property
def dims(self):
''' Tuple indicating the size of each input vector. '''
return tuple([x.size for x in self.refs.values()])
@property
def ndims(self):
''' Number of dimensions in lookup. '''
return len(self.refs)
@property
def inputs(self):
''' Names of reference input vectors. '''
return list(self.refs.keys())
@property
def outputs(self):
''' Names of the different output tables. '''
return list(self.keys())
@property
def interp_method(self):
return self._interp_method
@interp_method.setter
def interp_method(self, value):
if value not in self.interp_choices:
raise ValueError(f'interpolation method must be one of {self.interp_choices}')
if self.isPolynomialMethod(value) and self.ndims > 1:
raise ValueError(f'polynomial interpolation only available for 1D lookups')
self._interp_method = value
@property
def extrapolate(self):
return self._extrapolate
@extrapolate.setter
def extrapolate(self, value):
if not isinstance(value, bool):
raise ValueError(f'extrapolate: expected boolean')
self._extrapolate = value
@property
def kwattrs(self):
return {
'interp_method': self.interp_method,
'extrapolate': self.extrapolate}
def checkAgainst(self, other):
''' Check self object against another lookup object for compatibility. '''
if self.inputs != other.inputs:
raise ValueError(f'Differing lookups (references names do not match)')
if self.dims != other.dims:
raise ValueError(f'Differing lookup dimensions ({self.dims} - {other.dims})')
for k, v in self.refitems():
if (other.refs[k] != v).any():
raise ValueError(f'Differing {k} lookup reference')
if self.outputs != other.outputs:
raise ValueError(f'Differing lookups (table names do not match)')
def operate(self, other, op):
''' Generic arithmetic operator. '''
if isinstance(other, int):
other = float(other)
if isinstance(other, self.__class__):
self.checkAgainst(other)
tables = {k: getattr(v, op)(other[k]) for k, v in self.items()}
elif isinstance(other, float):
tables = {k: getattr(v, op)(other) for k, v in self.items()}
else:
raise ValueError(f'Cannot {op} {self.__class__} object with {type(other)} variable')
return self.__class__(self.refs, tables, **self.kwattrs)
def __add__(self, other):
''' Addition operator. '''
return self.operate(other, '__add__')
def __sub__(self, other):
''' Subtraction operator. '''
return self.operate(other, '__sub__')
def __mul__(self, other):
''' Multiplication operator. '''
return self.operate(other, '__mul__')
def __truediv__(self, other):
''' Division operator. '''
return self.operate(other, '__truediv__')
def squeeze(self):
''' Return a new lookup object in which all lookup dimensions that only contain
a single value have been removed '''
new_tables = {k: v.squeeze() for k, v in self.items()}
new_refs = {}
for k, v in self.refitems():
if v.size > 1:
new_refs[k] = v
return self.__class__(new_refs, new_tables, **self.kwattrs)
def getAxisIndex(self, key):
''' Get the axis index of a specific input key. '''
assert key in self.inputs, f'Unkown input dimension: {key}'
return self.inputs.index(key)
def copy(self):
''' Return a copy of the current lookup object. '''
return self.__class__(self.refs, self.tables, **self.kwattrs)
def checkInterpMethod(self, interp_method):
if interp_method not in self.interp_choices:
raise ValueError(f'interpolation method must be one of {self.interp_choices}')
@staticmethod
def isPolynomialMethod(method):
return method.startswith('poly')
def getInterpolationDegree(self):
return int(self.interp_method[-1])
def getInterpolator(self, ref_key, table_key, axis=-1):
        ''' Return a 1D interpolator function along a given reference vector for a specific table. '''
if self.isPolynomialMethod(self.interp_method):
return np.poly1d(np.polyfit(self.refs[ref_key], self.tables[table_key],
self.getInterpolationDegree()))
else:
fill_value = 'extrapolate' if self.kwattrs['extrapolate'] else np.nan
return interp1d(self.refs[ref_key], self.tables[table_key], axis=axis,
kind=self.interp_method, assume_sorted=True, fill_value=fill_value)
def project(self, key, value):
''' Return a new lookup object in which tables are interpolated at one/several
specific value(s) along a given dimension.
:param key: input key
:param value: value(s) to interpolate lookup tables at
:return: new interpolated lookup object with adapted dimensions
'''
# Check if value is 0 or 1-dimensional
if not isIterable(value):
delete_input_dim = True
else:
delete_input_dim = False
value = np.asarray(value)
# Check that value is within the bounds of the reference vector
if not self.kwattrs['extrapolate']:
value = isWithin(key, value, (self.refs[key].min(), self.refs[key].max()))
# Get the axis index of the reference vector
axis = self.getAxisIndex(key)
# print(f'interpolating lookup along {key} (axis {axis}) at {value}')
# Construct new tables dictionary
if self.refs[key].size == 1:
# If reference vector has only 1 value, take the mean along corresponding dimension
new_tables = {k: v.mean(axis=axis) for k, v in self.items()}
else:
# Otherwise, interpolate lookup tables appropriate value(s) along the reference vector
new_tables = {k: self.getInterpolator(key, k, axis=axis)(value) for k in self.keys()}
        # Construct new refs dictionary, deleting or updating the projected reference
new_refs = self.refs.copy()
if delete_input_dim:
# If interpolation value is a scalar, remove the corresponding input vector
del new_refs[key]
else:
# Otherwise, update the input vector at the interpolation values
new_refs[key] = value
# Construct and return a lookup object with the updated refs and tables
return self.__class__(new_refs, new_tables, **self.kwattrs)
def projectN(self, projections):
''' Project along multiple dimensions simultaneously.
:param projections: dictionary of input keys and corresponding interpolation value(s)
:return: new interpolated lookup object with adapted dimensions
'''
# Construct a copy of the current lookup object
lkp = self.copy()
# Apply successive projections, overwriting the lookup object at each step
for k, v in projections.items():
lkp = lkp.project(k, v)
# Return updated lookup object
return lkp
def move(self, key, index):
''' Move a specific input to a new index and re-organize lookup object accordingly.
:param key: input key
:param index: target index
'''
# Get absolute target axis index
if index == -1:
index = self.ndims - 1
# Get reference axis index
iref = self.getAxisIndex(key)
# Re-organize all lookup tables, moving the reference axis to the target index
for k in self.keys():
self.tables[k] = np.moveaxis(self.tables[k], iref, index)
# Re-order refs dictionary such that key falls at the appropriate index
self.refs = {k: self.refs[k] for k in moveItem(list(self.refs.keys()), key, index)}
def interpVar1D(self, ref_value, var_key):
''' Interpolate a specific lookup vector at one/several specific value(s)
along the reference input vector.
:param ref_value: specific input value
:param var_key: output table key
:return: interpolated value(s)
.. warning:: This method can only be used for 1 dimensional lookups.
'''
assert self.ndims == 1, 'Cannot interpolate multi-dimensional object'
return np.interp(ref_value, self.ref, self.tables[var_key], left=np.nan, right=np.nan)
def interpolate1D(self, value):
''' Interpolate all lookup vectors variable at one/several specific value(s)
along the reference input vector.
:param value: specific input value
:return: dictionary of output keys: interpolated value(s)
.. warning:: This method can only be used for 1 dimensional lookups.
'''
return {k: self.interpVar1D(value, k) for k in self.outputs}
def tile(self, ref_name, ref_values):
''' Return a new lookup object in which tables are tiled along a new input dimension.
:param ref_name: input name
:param ref_values: input vector
:return: lookup object with additional input vector and tiled tables
'''
itiles = range(ref_values.size)
tables = {k: np.array([v for i in itiles]) for k, v in self.items()}
refs = {**{ref_name: ref_values}, **self.refs}
return self.__class__(refs, tables, **self.kwattrs)
def reduce(self, rfunc, ref_name):
''' Reduce lookup by applying a reduction function along a specific reference axis. '''
iaxis = self.getAxisIndex(ref_name)
refs = {k: v for k, v in self.refitems() if k != ref_name}
tables = {k: rfunc(v, axis=iaxis) for k, v in self.items()}
return self.__class__(refs, tables, **self.kwattrs)
def toDict(self):
''' Translate self object into a dictionary. '''
return {
'refs': {k: v.tolist() for k, v in self.refs.items()},
'tables': {k: v.tolist() for k, v in self.tables.items()},
}
@classmethod
def fromDict(cls, d):
''' Construct lookup instance from dictionary. '''
refs = {k: np.array(v) for k, v in d['refs'].items()}
tables = {k: np.array(v) for k, v in d['tables'].items()}
return cls(refs, tables)
def toJson(self, fpath):
''' Save self object to a JSON file. '''
with open(fpath, 'w') as fh:
json.dump(self.toDict(), fh)
@classmethod
def fromJson(cls, fpath):
''' Construct lookup instance from JSON file. '''
cls.checkForExistence(fpath)
with open(fpath) as fh:
d = json.load(fh)
return cls.fromDict(d)
def toPickle(self, fpath):
''' Save self object to a PKL file. '''
with open(fpath, 'wb') as fh:
pickle.dump({'refs': self.refs, 'tables': self.tables}, fh)
@classmethod
def fromPickle(cls, fpath):
''' Construct lookup instance from PKL file. '''
cls.checkForExistence(fpath)
with open(fpath, 'rb') as fh:
d = pickle.load(fh)
return cls(d['refs'], d['tables'])
@staticmethod
def checkForExistence(fpath):
''' Raise an error if filepath does not correspond to an existing file. '''
if not os.path.isfile(fpath):
raise FileNotFoundError(f'Missing lookup file: "{fpath}"')
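# Illustrative usage sketch (an addition, not part of the original module; the reference
# vector, table and key names below are hypothetical):
#   lkp = Lookup({'Q': Q_vector}, {'V': V_table})
#   lkp.interpolate1D(0.0)                 # -> {'V': value interpolated at Q = 0}
#   lkp2 = Lookup.fromDict(lkp.toDict())   # JSON-safe round trip through plain lists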
class EffectiveVariablesLookup(Lookup):
''' Lookup object with added functionality to handle effective variables, namely:
- a special EffectiveVariablesDict wrapper around the output tables
- projectOff and projectDC methods allowing for smart projections.
'''
def __init__(self, refs, tables, **kwargs):
if not isinstance(tables, EffectiveVariablesDict):
tables = EffectiveVariablesDict(tables)
super().__init__(refs, tables, **kwargs)
def interpolate1D(self, value):
return EffectiveVariablesDict(super().interpolate1D(value))
def projectOff(self):
''' Project for OFF periods (zero amplitude). '''
# Interpolate at zero amplitude
lkp0 = self.project('A', 0.)
# Move charge axis to end in all tables
Qaxis = lkp0.getAxisIndex('Q')
for k, v in lkp0.items():
lkp0.tables[k] = np.moveaxis(v, Qaxis, -1)
# Iterate along dimensions and take first value along corresponding axis
for i in range(lkp0.ndims - 1):
for k, v in lkp0.items():
lkp0.tables[k] = v[0]
# Keep only charge vector in references
lkp0.refs = {'Q': lkp0.refs['Q']}
return lkp0
def projectDC(self, amps=None, DC=1.):
''' Project lookups at a given duty cycle.'''
# Assign default values
if amps is None:
amps = self.refs['A']
elif not isIterable(amps):
amps = np.array([amps])
# project lookups at zero and defined amps
lkp0 = self.project('A', 0.)
lkps_ON = self.project('A', amps)
# Retrieve amplitude axis index, and move amplitude to first axis
A_axis = lkps_ON.getAxisIndex('A')
lkps_ON.move('A', 0)
# Tile the zero-amplitude lookup to match the lkps_ON dimensions
lkps_OFF = lkp0.tile('A', lkps_ON.refs['A'])
# Compute a DC averaged lookup
lkp = lkps_ON * DC + lkps_OFF * (1 - DC)
# Move amplitude back to its original axis
lkp.move('A', A_axis)
return lkp
class EffectiveVariablesDict():
    ''' Wrapper around a dictionary object, allowing it to return derived
        effective variables for special keys.
'''
# Key patterns
suffix_pattern = '[A-Za-z0-9_]+'
xinf_pattern = re.compile(f'^({suffix_pattern})inf$')
taux_pattern = re.compile(f'^tau({suffix_pattern})$')
def __init__(self, d):
self.d = d
def __repr__(self):
return self.__class__.__name__ + '(' + ', '.join(self.d.keys()) + ')'
def items(self):
return self.d.items()
def keys(self):
return self.d.keys()
def values(self):
return self.d.values()
def alphax(self, x):
return self.d[f'alpha{x}']
def betax(self, x):
return self.d[f'beta{x}']
def taux(self, x):
return 1 / (self.alphax(x) + self.betax(x))
def xinf(self, x):
return self.alphax(x) * self.taux(x)
def __getitem__(self, key):
if key in self.d:
return self.d[key]
else:
m = self.taux_pattern.match(key)
if m is not None:
return self.taux(m.group(1))
else:
m = self.xinf_pattern.match(key)
if m is not None:
return self.xinf(m.group(1))
else:
raise KeyError(key)
def __setitem__(self, key, value):
self.d[key] = value
def __delitem__(self, key):
del self.d[key]
def pop(self, key):
return self.d.pop(key)
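if __name__ == '__main__':
    # Illustrative sketch (an addition, not part of the original module): the special key
    # patterns resolve derived gating variables on the fly, i.e. 'tau<x>' maps to
    # 1 / (alpha<x> + beta<x>) and '<x>inf' maps to alpha<x> * tau<x>.
    d = EffectiveVariablesDict({'alpham': 1.0, 'betam': 3.0})
    print(d['taum'])   # -> 0.25
    print(d['minf'])   # -> 0.25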
|
[
"numpy.moveaxis",
"json.load",
"pickle.dump",
"numpy.asarray",
"os.path.isfile",
"pickle.load",
"numpy.array",
"sys.getsizeof",
"numpy.interp",
"scipy.interpolate.interp1d",
"re.compile"
] |
[((17091, 17129), 're.compile', 're.compile', (['f"""^({suffix_pattern})inf$"""'], {}), "(f'^({suffix_pattern})inf$')\n", (17101, 17129), False, 'import re\n'), ((17149, 17187), 're.compile', 're.compile', (['f"""^tau({suffix_pattern})$"""'], {}), "(f'^tau({suffix_pattern})$')\n", (17159, 17187), False, 'import re\n'), ((11705, 11784), 'numpy.interp', 'np.interp', (['ref_value', 'self.ref', 'self.tables[var_key]'], {'left': 'np.nan', 'right': 'np.nan'}), '(ref_value, self.ref, self.tables[var_key], left=np.nan, right=np.nan)\n', (11714, 11784), True, 'import numpy as np\n'), ((2468, 2488), 'sys.getsizeof', 'getsizeof', (['self.refs'], {}), '(self.refs)\n', (2477, 2488), False, 'from sys import getsizeof\n'), ((2491, 2513), 'sys.getsizeof', 'getsizeof', (['self.tables'], {}), '(self.tables)\n', (2500, 2513), False, 'from sys import getsizeof\n'), ((7791, 7927), 'scipy.interpolate.interp1d', 'interp1d', (['self.refs[ref_key]', 'self.tables[table_key]'], {'axis': 'axis', 'kind': 'self.interp_method', 'assume_sorted': '(True)', 'fill_value': 'fill_value'}), '(self.refs[ref_key], self.tables[table_key], axis=axis, kind=self.\n interp_method, assume_sorted=True, fill_value=fill_value)\n', (7799, 7927), False, 'from scipy.interpolate import interp1d\n'), ((8507, 8524), 'numpy.asarray', 'np.asarray', (['value'], {}), '(value)\n', (8517, 8524), True, 'import numpy as np\n'), ((10988, 11028), 'numpy.moveaxis', 'np.moveaxis', (['self.tables[k]', 'iref', 'index'], {}), '(self.tables[k], iref, index)\n', (10999, 11028), True, 'import numpy as np\n'), ((12610, 12639), 'numpy.array', 'np.array', (['[v for i in itiles]'], {}), '([v for i in itiles])\n', (12618, 12639), True, 'import numpy as np\n'), ((13523, 13534), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (13531, 13534), True, 'import numpy as np\n'), ((13587, 13598), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (13595, 13598), True, 'import numpy as np\n'), ((14013, 14026), 'json.load', 'json.load', (['fh'], {}), '(fh)\n', (14022, 14026), False, 'import json\n'), ((14188, 14247), 'pickle.dump', 'pickle.dump', (["{'refs': self.refs, 'tables': self.tables}", 'fh'], {}), "({'refs': self.refs, 'tables': self.tables}, fh)\n", (14199, 14247), False, 'import pickle\n'), ((14446, 14461), 'pickle.load', 'pickle.load', (['fh'], {}), '(fh)\n', (14457, 14461), False, 'import pickle\n'), ((14657, 14678), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (14671, 14678), False, 'import os\n'), ((15665, 15690), 'numpy.moveaxis', 'np.moveaxis', (['v', 'Qaxis', '(-1)'], {}), '(v, Qaxis, -1)\n', (15676, 15690), True, 'import numpy as np\n'), ((16244, 16260), 'numpy.array', 'np.array', (['[amps]'], {}), '([amps])\n', (16252, 16260), True, 'import numpy as np\n')]
|
import matplotlib.pyplot as plt
from matplotlib.patches import RegularPolygon
from matplotlib.collections import PatchCollection
from pkg_resources import resource_filename
import numpy as np
def load_pixel_coordinates():
filename = resource_filename('iceact', 'resources/pixel_coordinates.txt')
x, y = np.genfromtxt(filename, unpack=True)
return x, y
def plot_camera(data, cmap='gray', ax=None):
'''
Create an IceAct camera plot
Parameters
----------
data: array-like with length 61
data array with one value per pixel
    cmap: str or matplotlib.colors.Colormap instance
        The colormap to use
    ax: matplotlib.axes.Axes instance
The axes to use. If not given, the current axes will be used.
'''
if ax is None:
ax = plt.gca()
x, y = load_pixel_coordinates()
hexagons = [
RegularPolygon(xy, 6, radius=1)
for xy in zip(x, y)
]
collection = PatchCollection(hexagons)
collection.set_array(data)
collection.set_cmap(cmap)
ax.add_collection(collection)
ax.set_xlim(x.min() - 2, x.max() + 2)
ax.set_ylim(y.min() - 2, y.max() + 2)
ax.set_aspect(1)
return collection
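if __name__ == '__main__':
    # Illustrative sketch (an addition, not part of the original module); it assumes the
    # packaged pixel-coordinate resource file is available and fills the 61 pixels with
    # random values.
    rng = np.random.default_rng(42)
    values = rng.uniform(size=61)
    collection = plot_camera(values, cmap='viridis')
    plt.colorbar(collection)
    plt.show()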
|
[
"matplotlib.patches.RegularPolygon",
"numpy.genfromtxt",
"pkg_resources.resource_filename",
"matplotlib.pyplot.gca",
"matplotlib.collections.PatchCollection"
] |
[((239, 301), 'pkg_resources.resource_filename', 'resource_filename', (['"""iceact"""', '"""resources/pixel_coordinates.txt"""'], {}), "('iceact', 'resources/pixel_coordinates.txt')\n", (256, 301), False, 'from pkg_resources import resource_filename\n'), ((313, 349), 'numpy.genfromtxt', 'np.genfromtxt', (['filename'], {'unpack': '(True)'}), '(filename, unpack=True)\n', (326, 349), True, 'import numpy as np\n'), ((950, 975), 'matplotlib.collections.PatchCollection', 'PatchCollection', (['hexagons'], {}), '(hexagons)\n', (965, 975), False, 'from matplotlib.collections import PatchCollection\n'), ((794, 803), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (801, 803), True, 'import matplotlib.pyplot as plt\n'), ((867, 898), 'matplotlib.patches.RegularPolygon', 'RegularPolygon', (['xy', '(6)'], {'radius': '(1)'}), '(xy, 6, radius=1)\n', (881, 898), False, 'from matplotlib.patches import RegularPolygon\n')]
|
"""Implement boundary and interior discretization on regular Fourier grid."""
import numpy as np
from src.grid import Grid
from scipy.sparse import issparse, vstack as sparse_vstack
def space_points(bdry_x, bdry_y, dx):
"""Space points equally on boundary with distance approximately dx."""
m = 8192
k = np.arange(m+1)/m
fine_boundary = np.transpose([bdry_x(k), bdry_y(k)])
# Find length of boundary curve.
pts_diff = fine_boundary[1:] - fine_boundary[:-1]
ptwise_len = np.linalg.norm(pts_diff, axis=1)
length = np.sum(ptwise_len)
num_pts = int(length/dx)
corrected_dx = length/num_pts
# Place points corrected_dx apart on boundary.
boundary = np.array([bdry_x(0), bdry_y(0)])
current_position = 0
current_index = 1
current_fine_index = 1
while current_index < num_pts:
current_position += ptwise_len[current_fine_index]
current_fine_index += 1
if current_position >= current_index*corrected_dx:
boundary = np.vstack((boundary,
fine_boundary[current_fine_index, :]))
current_index += 1
return boundary
def vstack(vectors):
    """Stack interpolation rows, using scipy for sparse inputs and numpy otherwise."""
    if issparse(vectors[0]):
        return sparse_vstack(vectors)
    else:
        return np.vstack(vectors)
class Domain:
"""Domain discretization (interior and boundary) on regular grid."""
def __init__(self, grid: Grid,
interior_test,
boundary,
boundary_density=1,
interp_accuracy='cubic'):
"""Create discretization of domain, interior and boundary.
Every domain is associated with a regular grid, grid.
interior_test takes x and y values as input and determines if a
point is in the interior of the domain.
        boundary is a pair (bdry_x, bdry_y) of coordinate functions which together
        parametrize the full boundary.
        boundary_density measures the ratio of the density of the boundary
        discretization to the density of the regular grid.
        It can be decreased below one to improve conditioning.
"""
self.grid = grid
# Interior.
self.interior_flag = interior_test(grid.grid_x, grid.grid_y)
self.num_int_pts = np.sum(self.interior_flag)
# Boundary.
self.boundary_density = boundary_density
self.dx = self.grid.grid_length / boundary_density
self.boundary = space_points(boundary[0],
boundary[1],
self.dx)
self.num_bdry_pts = self.boundary.shape[0]
self.interp_accuracy = interp_accuracy
boundary_interpolation = []
for i in range(self.num_bdry_pts):
boundary_interpolation.append(self.grid.interp(self.boundary[i, :],
interp_accuracy))
self.interp = vstack(boundary_interpolation)
def restriction(self, u):
return u[self.interior_flag]
def extension_by_zero(self, u):
u_ext = np.zeros_like(self.grid.grid_x)
u_ext[self.interior_flag] = u
return u_ext
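if __name__ == '__main__':
    # Illustrative sketch (an addition, not part of the original module): place points
    # roughly 0.1 apart on the unit circle, parametrized over k in [0, 1].
    circle_x = lambda t: np.cos(2 * np.pi * t)
    circle_y = lambda t: np.sin(2 * np.pi * t)
    pts = space_points(circle_x, circle_y, dx=0.1)
    print(pts.shape)  # roughly (62, 2): circumference 2*pi split into ~0.1-long segments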
|
[
"numpy.zeros_like",
"numpy.sum",
"scipy.sparse.vstack",
"numpy.arange",
"numpy.linalg.norm",
"numpy.vstack"
] |
[((490, 522), 'numpy.linalg.norm', 'np.linalg.norm', (['pts_diff'], {'axis': '(1)'}), '(pts_diff, axis=1)\n', (504, 522), True, 'import numpy as np\n'), ((536, 554), 'numpy.sum', 'np.sum', (['ptwise_len'], {}), '(ptwise_len)\n', (542, 554), True, 'import numpy as np\n'), ((308, 324), 'numpy.arange', 'np.arange', (['(m + 1)'], {}), '(m + 1)\n', (317, 324), True, 'import numpy as np\n'), ((1237, 1259), 'scipy.sparse.vstack', 'sparse_vstack', (['vectors'], {}), '(vectors)\n', (1250, 1259), True, 'from scipy.sparse import vstack as sparse_vstack\n'), ((1285, 1303), 'numpy.vstack', 'np.vstack', (['vectors'], {}), '(vectors)\n', (1294, 1303), True, 'import numpy as np\n'), ((2273, 2299), 'numpy.sum', 'np.sum', (['self.interior_flag'], {}), '(self.interior_flag)\n', (2279, 2299), True, 'import numpy as np\n'), ((999, 1058), 'numpy.vstack', 'np.vstack', (['(boundary, fine_boundary[current_fine_index, :])'], {}), '((boundary, fine_boundary[current_fine_index, :]))\n', (1008, 1058), True, 'import numpy as np\n'), ((3098, 3129), 'numpy.zeros_like', 'np.zeros_like', (['self.grid.grid_x'], {}), '(self.grid.grid_x)\n', (3111, 3129), True, 'import numpy as np\n')]
|
from __future__ import division
import numpy as np
import torch
import torch.nn as nn
from mmcv.cnn import normal_init
from mmdet.core import (AnchorGenerator, anchor_target_plus,
delta2bbox, force_fp32, multi_apply, multiclass_nms)
from ..builder import build_loss
from ..registry import HEADS
from ..utils import ConvModule, bias_init_with_prob
@HEADS.register_module
class BRLRetinaHead(nn.Module):
def __init__(self,
num_classes,
in_channels,
feat_channels=256,
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
anchor_base_sizes=None,
target_means=(0., 0., 0., 0.),
target_stds=(1.0, 1.0, 1.0, 1.0),
stacked_convs=4,
octave_base_scale=4,
scales_per_octave=3,
loss_cls_normal=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_cls_confusion=dict(
type='MirrorFocalLoss',
use_sigmoid=True,
gamma_1=2.0,
gamma_2=2.0,
alpha=0.25,
thresh=0.5,
beta=1.0),
loss_bbox=dict(
type='SmoothL1Loss', beta=0.11, loss_weight=1.0),
conv_cfg=None,
norm_cfg=None,
**kwargs):
super(BRLRetinaHead, self).__init__()
self.in_channels = in_channels
self.num_classes = num_classes
self.feat_channels = feat_channels
self.anchor_ratios = anchor_ratios
self.anchor_strides = anchor_strides
self.stacked_convs = stacked_convs
self.octave_base_scale = octave_base_scale
self.scales_per_octave = scales_per_octave
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
octave_scales = np.array(
[2**(i / scales_per_octave) for i in range(scales_per_octave)])
self.anchor_scales = octave_scales * octave_base_scale
self.anchor_base_sizes = list(anchor_strides) if anchor_base_sizes is None else anchor_base_sizes
self.target_means = target_means
self.target_stds = target_stds
self.use_sigmoid_cls = True
self.sampling = False
        if self.use_sigmoid_cls:
            self.cls_out_channels = num_classes - 1
        else:
            self.cls_out_channels = num_classes
self.loss_cls_normal = build_loss(loss_cls_normal)
self.loss_cls_confusion = build_loss(loss_cls_confusion)
self.loss_bbox = build_loss(loss_bbox)
self.fp16_enabled = False
self.anchor_generators = []
for anchor_base in self.anchor_base_sizes:
self.anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, anchor_ratios))
self.num_anchors = len(self.anchor_scales) * len(self.anchor_ratios)
self._init_layers()
def _init_layers(self):
self.relu = nn.ReLU(inplace=True)
self.cls_convs = nn.ModuleList()
self.reg_convs = nn.ModuleList()
for i in range(self.stacked_convs):
chn = self.in_channels if i == 0 else self.feat_channels
self.cls_convs.append(
ConvModule(
chn,
self.feat_channels,
3,
stride=1,
padding=1,
conv_cfg=self.conv_cfg,
norm_cfg=self.norm_cfg))
self.reg_convs.append(
ConvModule(
chn,
self.feat_channels,
3,
stride=1,
padding=1,
conv_cfg=self.conv_cfg,
norm_cfg=self.norm_cfg))
self.retina_cls = nn.Conv2d(
self.feat_channels,
self.num_anchors * self.cls_out_channels,
3,
padding=1)
self.retina_reg = nn.Conv2d(
self.feat_channels, self.num_anchors * 4, 3, padding=1)
def init_weights(self):
for m in self.cls_convs:
normal_init(m.conv, std=0.01)
for m in self.reg_convs:
normal_init(m.conv, std=0.01)
bias_cls = bias_init_with_prob(0.01)
normal_init(self.retina_cls, std=0.01, bias=bias_cls)
normal_init(self.retina_reg, std=0.01)
def forward(self, feats):
return multi_apply(self.forward_single, feats)
def forward_single(self, x):
cls_feat = x
reg_feat = x
for cls_conv in self.cls_convs:
cls_feat = cls_conv(cls_feat)
for reg_conv in self.reg_convs:
reg_feat = reg_conv(reg_feat)
cls_score = self.retina_cls(cls_feat)
bbox_pred = self.retina_reg(reg_feat)
return cls_score, bbox_pred
def get_anchors(self, featmap_sizes, img_metas, device='cuda'):
num_imgs = len(img_metas)
num_levels = len(featmap_sizes)
# since feature map sizes of all images are the same, we only compute
# anchors for one time
multi_level_anchors = []
for i in range(num_levels):
anchors = self.anchor_generators[i].grid_anchors(
featmap_sizes[i], self.anchor_strides[i], device=device)
multi_level_anchors.append(anchors)
anchor_list = [multi_level_anchors for _ in range(num_imgs)]
# for each image, we compute valid flags of multi level anchors
valid_flag_list = []
for img_id, img_meta in enumerate(img_metas):
multi_level_flags = []
for i in range(num_levels):
anchor_stride = self.anchor_strides[i]
feat_h, feat_w = featmap_sizes[i]
h, w, _ = img_meta['pad_shape']
valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h)
valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w)
flags = self.anchor_generators[i].valid_flags(
(feat_h, feat_w), (valid_feat_h, valid_feat_w),
device=device)
multi_level_flags.append(flags)
valid_flag_list.append(multi_level_flags)
return anchor_list, valid_flag_list
def loss_single(self, cls_score, bbox_pred, labels, label_weights, label_weights_confusion,
bbox_targets, bbox_weights, num_total_samples, cfg):
assert label_weights.shape == label_weights_confusion.shape
labels = labels.reshape(-1)
label_weights = label_weights.reshape(-1)
cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels)
loss_cls_normal = self.loss_cls_normal(cls_score, labels, label_weights,
avg_factor=num_total_samples)
loss_cls_confusion = self.loss_cls_confusion(cls_score, labels,
label_weights_confusion, avg_factor=num_total_samples)
loss_cls = loss_cls_normal + loss_cls_confusion
bbox_targets = bbox_targets.reshape(-1, 4)
bbox_weights = bbox_weights.reshape(-1, 4)
bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4)
loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples)
return loss_cls, loss_bbox
@force_fp32(apply_to=('cls_scores', 'bbox_preds'))
def loss(self,
cls_scores,
bbox_preds,
gt_bboxes,
gt_labels,
img_metas,
cfg,
gt_bboxes_ignore=None):
featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
assert len(featmap_sizes) == len(self.anchor_generators)
device = cls_scores[0].device
anchor_list, valid_flag_list = self.get_anchors(
featmap_sizes, img_metas, device=device)
label_channels = self.cls_out_channels
cls_reg_targets = anchor_target_plus(anchor_list,
valid_flag_list,
gt_bboxes,
img_metas,
self.target_means,
self.target_stds,
cfg,
gt_bboxes_ignore_list=gt_bboxes_ignore,
gt_labels_list=gt_labels,
label_channels=label_channels,
sampling=self.sampling)
if cls_reg_targets is None:
return None
(labels_list, label_weights_list, confusion_weights_list, bbox_targets_list, bbox_weights_list,
num_total_pos, num_total_neg, num_total_confuse) = cls_reg_targets
num_total_samples = num_total_pos
losses_cls, losses_bbox = multi_apply(
self.loss_single,
cls_scores,
bbox_preds,
labels_list,
label_weights_list,
confusion_weights_list,
bbox_targets_list,
bbox_weights_list,
num_total_samples=num_total_samples,
cfg=cfg,)
return dict(loss_cls=losses_cls, loss_bbox=losses_bbox)
@force_fp32(apply_to=('cls_scores', 'bbox_preds'))
def get_bboxes(self, cls_scores, bbox_preds, img_metas, cfg,
rescale=False):
assert len(cls_scores) == len(bbox_preds)
num_levels = len(cls_scores)
device = cls_scores[0].device
mlvl_anchors = [
self.anchor_generators[i].grid_anchors(
cls_scores[i].size()[-2:],
self.anchor_strides[i],
device=device) for i in range(num_levels)
]
result_list = []
for img_id in range(len(img_metas)):
cls_score_list = [
cls_scores[i][img_id].detach() for i in range(num_levels)
]
bbox_pred_list = [
bbox_preds[i][img_id].detach() for i in range(num_levels)
]
img_shape = img_metas[img_id]['img_shape']
scale_factor = img_metas[img_id]['scale_factor']
proposals = self.get_bboxes_single(cls_score_list, bbox_pred_list,
mlvl_anchors, img_shape,
scale_factor, cfg, rescale)
result_list.append(proposals)
return result_list
def get_bboxes_single(self,
cls_scores,
bbox_preds,
mlvl_anchors,
img_shape,
scale_factor,
cfg,
rescale=False):
assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors)
mlvl_bboxes = []
mlvl_scores = []
for cls_score, bbox_pred, anchors in zip(cls_scores, bbox_preds,
mlvl_anchors):
assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
cls_score = cls_score.permute(1, 2,
0).reshape(-1, self.cls_out_channels)
if self.use_sigmoid_cls:
scores = cls_score.sigmoid()
else:
scores = cls_score.softmax(-1)
bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4)
nms_pre = cfg.get('nms_pre', -1)
if nms_pre > 0 and scores.shape[0] > nms_pre:
if self.use_sigmoid_cls:
max_scores, _ = scores.max(dim=1)
else:
max_scores, _ = scores[:, 1:].max(dim=1)
_, topk_inds = max_scores.topk(nms_pre)
anchors = anchors[topk_inds, :]
bbox_pred = bbox_pred[topk_inds, :]
scores = scores[topk_inds, :]
bboxes = delta2bbox(anchors, bbox_pred, self.target_means,
self.target_stds, img_shape)
mlvl_bboxes.append(bboxes)
mlvl_scores.append(scores)
mlvl_bboxes = torch.cat(mlvl_bboxes)
if rescale:
mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor)
mlvl_scores = torch.cat(mlvl_scores)
if self.use_sigmoid_cls:
padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1)
mlvl_scores = torch.cat([padding, mlvl_scores], dim=1)
det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores,
cfg.score_thr, cfg.nms,
cfg.max_per_img)
return det_bboxes, det_labels
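if __name__ == '__main__':
    # Illustrative sketch (an addition, not part of the original module): how the anchor
    # scales used in __init__ follow from octave_base_scale and scales_per_octave.
    octave_base_scale, scales_per_octave = 4, 3
    octave_scales = np.array(
        [2**(i / scales_per_octave) for i in range(scales_per_octave)])
    print(octave_scales * octave_base_scale)  # ~[4. 5.04 6.35]: three scales per pyramid level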
|
[
"torch.nn.ReLU",
"mmdet.core.anchor_target_plus",
"numpy.ceil",
"torch.nn.ModuleList",
"torch.nn.Conv2d",
"mmdet.core.AnchorGenerator",
"mmdet.core.force_fp32",
"mmdet.core.delta2bbox",
"mmcv.cnn.normal_init",
"mmdet.core.multiclass_nms",
"mmdet.core.multi_apply"
] |
[((7508, 7557), 'mmdet.core.force_fp32', 'force_fp32', ([], {'apply_to': "('cls_scores', 'bbox_preds')"}), "(apply_to=('cls_scores', 'bbox_preds'))\n", (7518, 7557), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((9172, 9221), 'mmdet.core.force_fp32', 'force_fp32', ([], {'apply_to': "('cls_scores', 'bbox_preds')"}), "(apply_to=('cls_scores', 'bbox_preds'))\n", (9182, 9221), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((3124, 3145), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3131, 3145), True, 'import torch.nn as nn\n'), ((3171, 3186), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (3184, 3186), True, 'import torch.nn as nn\n'), ((3212, 3227), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (3225, 3227), True, 'import torch.nn as nn\n'), ((3969, 4058), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.feat_channels', '(self.num_anchors * self.cls_out_channels)', '(3)'], {'padding': '(1)'}), '(self.feat_channels, self.num_anchors * self.cls_out_channels, 3,\n padding=1)\n', (3978, 4058), True, 'import torch.nn as nn\n'), ((4130, 4195), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.feat_channels', '(self.num_anchors * 4)', '(3)'], {'padding': '(1)'}), '(self.feat_channels, self.num_anchors * 4, 3, padding=1)\n', (4139, 4195), True, 'import torch.nn as nn\n'), ((4441, 4494), 'mmcv.cnn.normal_init', 'normal_init', (['self.retina_cls'], {'std': '(0.01)', 'bias': 'bias_cls'}), '(self.retina_cls, std=0.01, bias=bias_cls)\n', (4452, 4494), False, 'from mmcv.cnn import normal_init\n'), ((4503, 4541), 'mmcv.cnn.normal_init', 'normal_init', (['self.retina_reg'], {'std': '(0.01)'}), '(self.retina_reg, std=0.01)\n', (4514, 4541), False, 'from mmcv.cnn import normal_init\n'), ((4588, 4627), 'mmdet.core.multi_apply', 'multi_apply', (['self.forward_single', 'feats'], {}), '(self.forward_single, feats)\n', (4599, 4627), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((8115, 8363), 'mmdet.core.anchor_target_plus', 'anchor_target_plus', (['anchor_list', 'valid_flag_list', 'gt_bboxes', 'img_metas', 'self.target_means', 'self.target_stds', 'cfg'], {'gt_bboxes_ignore_list': 'gt_bboxes_ignore', 'gt_labels_list': 'gt_labels', 'label_channels': 'label_channels', 'sampling': 'self.sampling'}), '(anchor_list, valid_flag_list, gt_bboxes, img_metas, self\n .target_means, self.target_stds, cfg, gt_bboxes_ignore_list=\n gt_bboxes_ignore, gt_labels_list=gt_labels, label_channels=\n label_channels, sampling=self.sampling)\n', (8133, 8363), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((8785, 8987), 'mmdet.core.multi_apply', 'multi_apply', (['self.loss_single', 'cls_scores', 'bbox_preds', 'labels_list', 'label_weights_list', 'confusion_weights_list', 'bbox_targets_list', 'bbox_weights_list'], {'num_total_samples': 'num_total_samples', 'cfg': 'cfg'}), '(self.loss_single, cls_scores, bbox_preds, labels_list,\n label_weights_list, confusion_weights_list, bbox_targets_list,\n bbox_weights_list, num_total_samples=num_total_samples, cfg=cfg)\n', (8796, 8987), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((12428, 12514), 'mmdet.core.multiclass_nms', 'multiclass_nms', (['mlvl_bboxes', 'mlvl_scores', 
'cfg.score_thr', 'cfg.nms', 'cfg.max_per_img'], {}), '(mlvl_bboxes, mlvl_scores, cfg.score_thr, cfg.nms, cfg.\n max_per_img)\n', (12442, 12514), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((4283, 4312), 'mmcv.cnn.normal_init', 'normal_init', (['m.conv'], {'std': '(0.01)'}), '(m.conv, std=0.01)\n', (4294, 4312), False, 'from mmcv.cnn import normal_init\n'), ((4358, 4387), 'mmcv.cnn.normal_init', 'normal_init', (['m.conv'], {'std': '(0.01)'}), '(m.conv, std=0.01)\n', (4369, 4387), False, 'from mmcv.cnn import normal_init\n'), ((11863, 11941), 'mmdet.core.delta2bbox', 'delta2bbox', (['anchors', 'bbox_pred', 'self.target_means', 'self.target_stds', 'img_shape'], {}), '(anchors, bbox_pred, self.target_means, self.target_stds, img_shape)\n', (11873, 11941), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((2904, 2967), 'mmdet.core.AnchorGenerator', 'AnchorGenerator', (['anchor_base', 'self.anchor_scales', 'anchor_ratios'], {}), '(anchor_base, self.anchor_scales, anchor_ratios)\n', (2919, 2967), False, 'from mmdet.core import AnchorGenerator, anchor_target_plus, delta2bbox, force_fp32, multi_apply, multiclass_nms\n'), ((5993, 6019), 'numpy.ceil', 'np.ceil', (['(h / anchor_stride)'], {}), '(h / anchor_stride)\n', (6000, 6019), True, 'import numpy as np\n'), ((6069, 6095), 'numpy.ceil', 'np.ceil', (['(w / anchor_stride)'], {}), '(w / anchor_stride)\n', (6076, 6095), True, 'import numpy as np\n')]
|
#!/usr/bin/python
"""A setuptools-based script for distributing and installing mcd."""
# Copyright 2014, 2015, 2016, 2017 <NAME>
# This file is part of mcd.
# See `License` for details of license and warranty.
import os
import numpy as np
from setuptools import setup
from setuptools.extension import Extension
from setuptools.command.sdist import sdist as _sdist
cython_locs = [
('mcd', 'metrics_fast'),
]
with open('README.rst') as readme_file:
long_description = readme_file.read()
with open('requirements.txt') as requirements_file:
    requires = [line.rstrip('\n') for line in requirements_file]
# see "A note on setup.py" in README.rst for an explanation of the dev file
dev_mode = os.path.exists('dev')
if dev_mode:
from Cython.Distutils import build_ext
from Cython.Build import cythonize
class sdist(_sdist):
"""A cythonizing sdist command.
This class is a custom sdist command which ensures all cython-generated
C files are up-to-date before running the conventional sdist command.
"""
def run(self):
cythonize([ os.path.join(*loc)+'.pyx' for loc in cython_locs ])
_sdist.run(self)
cmdclass = {'build_ext': build_ext, 'sdist': sdist}
ext_modules = [
Extension('.'.join(loc), [os.path.join(*loc)+'.pyx'],
extra_compile_args=['-Wno-unused-but-set-variable', '-O3'],
include_dirs=[np.get_include()])
for loc in cython_locs
]
else:
cmdclass = {}
ext_modules = [
Extension('.'.join(loc), [os.path.join(*loc)+'.c'],
extra_compile_args=['-Wno-unused-but-set-variable', '-O3'],
include_dirs=[np.get_include()])
for loc in cython_locs
]
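# Note (an addition, not part of the original file): with the 'dev' marker file present,
# `python setup.py sdist` regenerates the C sources from the .pyx files via cythonize
# before packaging, and build_ext compiles directly from the .pyx sources; without it,
# the pre-generated .c files are compiled, so end users do not need Cython installed.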
setup(
name='mcd',
version='0.5.dev1',
description='Mel cepstral distortion (MCD) computations in python.',
url='http://github.com/MattShannon/mcd',
author='<NAME>',
author_email='<EMAIL>',
license='3-clause BSD (see License file)',
packages=['mcd'],
install_requires=requires,
scripts=[
os.path.join('bin', 'dtw_synth'),
os.path.join('bin', 'get_mcd_dtw'),
os.path.join('bin', 'get_mcd_plain'),
],
long_description=long_description,
cmdclass=cmdclass,
ext_modules=ext_modules,
)
|
[
"setuptools.command.sdist.sdist.run",
"numpy.get_include",
"os.path.exists",
"os.path.join"
] |
[((657, 678), 'os.path.exists', 'os.path.exists', (['"""dev"""'], {}), "('dev')\n", (671, 678), False, 'import os\n'), ((1123, 1139), 'setuptools.command.sdist.sdist.run', '_sdist.run', (['self'], {}), '(self)\n', (1133, 1139), True, 'from setuptools.command.sdist import sdist as _sdist\n'), ((2052, 2084), 'os.path.join', 'os.path.join', (['"""bin"""', '"""dtw_synth"""'], {}), "('bin', 'dtw_synth')\n", (2064, 2084), False, 'import os\n'), ((2094, 2128), 'os.path.join', 'os.path.join', (['"""bin"""', '"""get_mcd_dtw"""'], {}), "('bin', 'get_mcd_dtw')\n", (2106, 2128), False, 'import os\n'), ((2138, 2174), 'os.path.join', 'os.path.join', (['"""bin"""', '"""get_mcd_plain"""'], {}), "('bin', 'get_mcd_plain')\n", (2150, 2174), False, 'import os\n'), ((1251, 1269), 'os.path.join', 'os.path.join', (['*loc'], {}), '(*loc)\n', (1263, 1269), False, 'import os\n'), ((1389, 1405), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (1403, 1405), True, 'import numpy as np\n'), ((1523, 1541), 'os.path.join', 'os.path.join', (['*loc'], {}), '(*loc)\n', (1535, 1541), False, 'import os\n'), ((1659, 1675), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (1673, 1675), True, 'import numpy as np\n'), ((1059, 1077), 'os.path.join', 'os.path.join', (['*loc'], {}), '(*loc)\n', (1071, 1077), False, 'import os\n')]
|
# Copyright 2013 Viewfinder Inc. All Rights Reserved.
"""Run analysis over all merged user analytics logs.
Computes speed percentiles for full asset scans (only those lasting more than 1s for more accurate numbers).
Automatically finds the list of merged logs in S3. If --start_date=YYYY-MM-DD is specified, only analyze logs
starting from a week before that date (we give user logs that much time to get uploaded).
Usage:
# Analyze all logs.
python -m viewfinder.backend.logs.analyze_analytics_logs
# Analyze logs from a specific start date.
python -m viewfinder.backend.logs.analyze_analytics_logs --start_date=2012-12-15
Other options:
-require_lock: default=True: hold the job:analyze_analytics lock during processing.
-smart_scan: default=False: determine the start date from previous run summaries.
-hours_between_runs: default=0: don't run if last successful run started less than this many hours ago.
"""
__author__ = '<EMAIL> (<NAME>)'
import cStringIO
import json
import logging
import numpy
import os
import sys
import time
import traceback
from collections import defaultdict, Counter
from tornado import gen, options
from viewfinder.backend.base import constants, main, statistics, util
from viewfinder.backend.base.dotdict import DotDict
from viewfinder.backend.db import db_client
from viewfinder.backend.db.job import Job
from viewfinder.backend.logs import logs_util
from viewfinder.backend.storage.object_store import ObjectStore
from viewfinder.backend.storage import store_utils
# TODO(marc): automatic date detection (eg: find latest metric entry and process from 30 days before).
options.define('start_date', default=None, help='Start date (filename start key). May be overridden by smart_scan.')
options.define('dry_run', default=True, help='Do not update dynamodb metrics table')
options.define('compute_today', default=False, help='Do not compute statistics for today, logs will be partial')
options.define('require_lock', type=bool, default=True,
help='attempt to grab the job:analyze_analytics lock before running. Exit if acquire fails.')
options.define('smart_scan', type=bool, default=False,
help='determine start_date from previous successful runs.')
options.define('hours_between_runs', type=int, default=0,
help='minimum time since start of last successful run (with dry_run=False)')
class DayStats(object):
def __init__(self, day):
self.day = day
self._scan_durations = []
self._long_scan_speeds = []
self._photos_scanned = []
# Number of unique users recording an event on this day.
self.event_users = Counter()
# Number of occurrences of an event aggregated across all users.
self.total_events = Counter()
def AddScan(self, version, photos, duration):
self._scan_durations.append(duration)
self._photos_scanned.append(photos)
if duration > 1.0:
self._long_scan_speeds.append(photos / duration)
def AddEvents(self, counters):
for name, count in counters.iteritems():
self.total_events[name] += count
self.event_users[name] += 1
def PrintSummary(self):
logging.info('Day: %s\n %s' % (self.day, statistics.FormatStats(self._long_scan_speeds, percentiles=[90,95,99])))
def ScanDurationPercentile(self, percentile):
return numpy.percentile(self._scan_durations, percentile)
def LongScanSpeedPercentile(self, percentile):
return numpy.percentile(self._long_scan_speeds, percentile)
def PhotosScannedPercentile(self, percentile):
return numpy.percentile(self._photos_scanned, percentile)
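# Illustrative sketch (an addition, not part of the original module): DayStats keeps only
# scans lasting more than 1s for the speed percentiles, e.g.
#   ds = DayStats('2013-01-01')
#   ds.AddScan('1.5', 120, 2.0)        # 60 photos/s; duration > 1s so it is kept
#   ds.LongScanSpeedPercentile(50)     # -> 60.0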
@gen.engine
def ProcessFiles(merged_store, filenames, callback):
"""Fetch and process each file contained in 'filenames'."""
@gen.engine
def _ProcessOneFile(contents, day_stats):
"""Iterate over the contents of a processed file: one entry per line. Increment stats for specific entries."""
buf = cStringIO.StringIO(contents)
buf.seek(0)
ui_events = Counter()
while True:
line = buf.readline()
if not line:
break
parsed = json.loads(line)
if not parsed:
continue
if 'version' not in parsed:
continue
# TODO(marc): lookup the user's device ID in dynamodb and get device model.
payload = parsed['payload']
if 'name' in payload:
if payload['name'] == '/assets/scan' and payload['type'] == 'full':
day_stats.AddScan(parsed['version'], payload['num_scanned'], payload['elapsed'])
elif payload['name'].startswith('/ui/'):
ui_events[payload['name']] += 1
if ui_events:
ui_events['/ui/anything'] += 1
day_stats.AddEvents(ui_events)
buf.close()
today = util.NowUTCToISO8601()
# Group filenames by day.
files_by_day = defaultdict(list)
for filename in filenames:
_, day, user = filename.split('/')
if options.options.compute_today or today != day:
files_by_day[day].append(filename)
# Compute per-day totals. Toss them into a list, we'll want it sorted.
stats_by_day = {}
for day in sorted(files_by_day.keys()):
# We don't really need to process days in-order, but it's nicer.
files = files_by_day[day]
day_stats = DayStats(day)
for f in files:
contents = ''
try:
contents = yield gen.Task(merged_store.Get, f)
except Exception as e:
logging.error('Error fetching file %s: %r' % (f, e))
continue
_ProcessOneFile(contents, day_stats)
if len(day_stats._long_scan_speeds) == 0:
continue
dd = DotDict()
for p in [1, 5, 10, 25, 50, 75, 90, 95, 99]:
dd['user_analytics.scans_gt1s_speed_percentile.%.2d' % p] = day_stats.LongScanSpeedPercentile(p)
dd['user_analytics.scans_duration_percentile.%.2d' % p] = day_stats.ScanDurationPercentile(p)
dd['user_analytics.scans_num_photos_percentile.%.2d' % p] = day_stats.PhotosScannedPercentile(p)
dd['user_analytics.ui.event_users'] = day_stats.event_users
dd['user_analytics.ui.total_events'] = day_stats.total_events
stats_by_day[day] = dd
callback(stats_by_day)
@gen.engine
def GetMergedLogsFileList(merged_store, marker, callback):
"""Fetch the list of file names from S3."""
registry_dir = os.path.join(logs_util.UserAnalyticsLogsPaths.kMergedLogsPrefix,
logs_util.UserAnalyticsLogsPaths.kRegistryDir)
def _WantFile(filename):
return not filename.startswith(registry_dir)
base_path = logs_util.UserAnalyticsLogsPaths.kMergedLogsPrefix + '/'
marker = os.path.join(base_path, marker) if marker is not None else None
file_list = yield gen.Task(store_utils.ListAllKeys, merged_store, prefix=base_path, marker=marker)
files = [f for f in file_list if _WantFile(f)]
files.sort()
logging.info('found %d merged log files, analyzing %d' % (len(file_list), len(files)))
callback(files)
@gen.engine
def RunOnce(client, job, callback):
"""Get list of files and call processing function."""
merged_store = ObjectStore.GetInstance(logs_util.UserAnalyticsLogsPaths.MERGED_LOGS_BUCKET)
start_date = options.options.start_date
if options.options.smart_scan:
# Search for successful full-scan run in the last week.
last_run = yield gen.Task(job.FindLastSuccess, with_payload_key='stats.last_day')
if last_run is None:
logging.info('No previous successful scan found, rerun with --start_date')
callback(None)
return
last_run_start = last_run['start_time']
if util.HoursSince(last_run_start) < options.options.hours_between_runs:
logging.info('Last successful run started at %s, less than %d hours ago; skipping.' %
(time.asctime(time.localtime(last_run_start)), options.options.hours_between_runs))
callback(None)
return
last_day = last_run['stats.last_day']
    # Set the scan start to a week before the last processed day, giving late-arriving
    # user logs time to be uploaded before that window is re-analyzed (see module docstring).
start_time = util.ISO8601ToUTCTimestamp(last_day, hour=12) - constants.SECONDS_PER_WEEK
start_date = util.TimestampUTCToISO8601(start_time)
logging.info('Last successful analyze_analytics run (%s) scanned up to %s, setting analysis start date to %s' %
(time.asctime(time.localtime(last_run_start)), last_day, start_date))
# Fetch list of merged logs.
files = yield gen.Task(GetMergedLogsFileList, merged_store, start_date)
day_stats = yield gen.Task(ProcessFiles, merged_store, files)
# Write per-day stats to dynamodb.
if len(day_stats) > 0:
hms = logs_util.kDailyMetricsTimeByLogType['analytics_logs']
yield gen.Task(logs_util.UpdateMetrics, client, day_stats, dry_run=options.options.dry_run, hms_tuple=hms)
last_day = sorted(day_stats.keys())[-1]
callback(last_day)
else:
callback(None)
@gen.engine
def _Start(callback):
"""Grab a lock on job:analyze_analytics and call RunOnce. If we get a return value, write it to the job summary."""
client = db_client.DBClient.Instance()
job = Job(client, 'analyze_analytics')
if options.options.require_lock:
got_lock = yield gen.Task(job.AcquireLock)
if got_lock == False:
logging.warning('Failed to acquire job lock: exiting.')
callback()
return
result = None
job.Start()
try:
result = yield gen.Task(RunOnce, client, job)
except:
# Failure: log run summary with trace.
typ, val, tb = sys.exc_info()
msg = ''.join(traceback.format_exception(typ, val, tb))
logging.info('Registering failed run with message: %s' % msg)
yield gen.Task(job.RegisterRun, Job.STATUS_FAILURE, failure_msg=msg)
else:
if result is not None and not options.options.dry_run:
# Successful run with data processed and not in dry-run mode: write run summary.
stats = DotDict()
stats['last_day'] = result
logging.info('Registering successful run with stats: %r' % stats)
yield gen.Task(job.RegisterRun, Job.STATUS_SUCCESS, stats=stats)
finally:
yield gen.Task(job.ReleaseLock)
callback()
if __name__ == '__main__':
sys.exit(main.InitAndRun(_Start))
|
[
"viewfinder.backend.storage.object_store.ObjectStore.GetInstance",
"collections.defaultdict",
"sys.exc_info",
"viewfinder.backend.base.util.TimestampUTCToISO8601",
"os.path.join",
"viewfinder.backend.base.dotdict.DotDict",
"logging.error",
"json.loads",
"logging.warning",
"collections.Counter",
"viewfinder.backend.base.statistics.FormatStats",
"tornado.gen.Task",
"time.localtime",
"traceback.format_exception",
"viewfinder.backend.base.util.HoursSince",
"viewfinder.backend.db.db_client.DBClient.Instance",
"numpy.percentile",
"viewfinder.backend.base.util.NowUTCToISO8601",
"viewfinder.backend.base.main.InitAndRun",
"viewfinder.backend.db.job.Job",
"cStringIO.StringIO",
"tornado.options.define",
"logging.info",
"viewfinder.backend.base.util.ISO8601ToUTCTimestamp"
] |
[((1614, 1735), 'tornado.options.define', 'options.define', (['"""start_date"""'], {'default': 'None', 'help': '"""Start date (filename start key). May be overridden by smart_scan."""'}), "('start_date', default=None, help=\n 'Start date (filename start key). May be overridden by smart_scan.')\n", (1628, 1735), False, 'from tornado import gen, options\n'), ((1731, 1820), 'tornado.options.define', 'options.define', (['"""dry_run"""'], {'default': '(True)', 'help': '"""Do not update dynamodb metrics table"""'}), "('dry_run', default=True, help=\n 'Do not update dynamodb metrics table')\n", (1745, 1820), False, 'from tornado import gen, options\n'), ((1816, 1933), 'tornado.options.define', 'options.define', (['"""compute_today"""'], {'default': '(False)', 'help': '"""Do not compute statistics for today, logs will be partial"""'}), "('compute_today', default=False, help=\n 'Do not compute statistics for today, logs will be partial')\n", (1830, 1933), False, 'from tornado import gen, options\n'), ((1929, 2088), 'tornado.options.define', 'options.define', (['"""require_lock"""'], {'type': 'bool', 'default': '(True)', 'help': '"""attempt to grab the job:analyze_analytics lock before running. Exit if acquire fails."""'}), "('require_lock', type=bool, default=True, help=\n 'attempt to grab the job:analyze_analytics lock before running. Exit if acquire fails.'\n )\n", (1943, 2088), False, 'from tornado import gen, options\n'), ((2094, 2213), 'tornado.options.define', 'options.define', (['"""smart_scan"""'], {'type': 'bool', 'default': '(False)', 'help': '"""determine start_date from previous successful runs."""'}), "('smart_scan', type=bool, default=False, help=\n 'determine start_date from previous successful runs.')\n", (2108, 2213), False, 'from tornado import gen, options\n'), ((2224, 2363), 'tornado.options.define', 'options.define', (['"""hours_between_runs"""'], {'type': 'int', 'default': '(0)', 'help': '"""minimum time since start of last successful run (with dry_run=False)"""'}), "('hours_between_runs', type=int, default=0, help=\n 'minimum time since start of last successful run (with dry_run=False)')\n", (2238, 2363), False, 'from tornado import gen, options\n'), ((4678, 4700), 'viewfinder.backend.base.util.NowUTCToISO8601', 'util.NowUTCToISO8601', ([], {}), '()\n', (4698, 4700), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((4746, 4763), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4757, 4763), False, 'from collections import defaultdict, Counter\n'), ((6201, 6317), 'os.path.join', 'os.path.join', (['logs_util.UserAnalyticsLogsPaths.kMergedLogsPrefix', 'logs_util.UserAnalyticsLogsPaths.kRegistryDir'], {}), '(logs_util.UserAnalyticsLogsPaths.kMergedLogsPrefix, logs_util.\n UserAnalyticsLogsPaths.kRegistryDir)\n', (6213, 6317), False, 'import os\n'), ((6962, 7038), 'viewfinder.backend.storage.object_store.ObjectStore.GetInstance', 'ObjectStore.GetInstance', (['logs_util.UserAnalyticsLogsPaths.MERGED_LOGS_BUCKET'], {}), '(logs_util.UserAnalyticsLogsPaths.MERGED_LOGS_BUCKET)\n', (6985, 7038), False, 'from viewfinder.backend.storage.object_store import ObjectStore\n'), ((9023, 9052), 'viewfinder.backend.db.db_client.DBClient.Instance', 'db_client.DBClient.Instance', ([], {}), '()\n', (9050, 9052), False, 'from viewfinder.backend.db import db_client\n'), ((9061, 9093), 'viewfinder.backend.db.job.Job', 'Job', (['client', '"""analyze_analytics"""'], {}), "(client, 'analyze_analytics')\n", (9064, 9093), False, 'from 
viewfinder.backend.db.job import Job\n'), ((2621, 2630), 'collections.Counter', 'Counter', ([], {}), '()\n', (2628, 2630), False, 'from collections import defaultdict, Counter\n'), ((2724, 2733), 'collections.Counter', 'Counter', ([], {}), '()\n', (2731, 2733), False, 'from collections import defaultdict, Counter\n'), ((3300, 3350), 'numpy.percentile', 'numpy.percentile', (['self._scan_durations', 'percentile'], {}), '(self._scan_durations, percentile)\n', (3316, 3350), False, 'import numpy\n'), ((3412, 3464), 'numpy.percentile', 'numpy.percentile', (['self._long_scan_speeds', 'percentile'], {}), '(self._long_scan_speeds, percentile)\n', (3428, 3464), False, 'import numpy\n'), ((3526, 3576), 'numpy.percentile', 'numpy.percentile', (['self._photos_scanned', 'percentile'], {}), '(self._photos_scanned, percentile)\n', (3542, 3576), False, 'import numpy\n'), ((3890, 3918), 'cStringIO.StringIO', 'cStringIO.StringIO', (['contents'], {}), '(contents)\n', (3908, 3918), False, 'import cStringIO\n'), ((3951, 3960), 'collections.Counter', 'Counter', ([], {}), '()\n', (3958, 3960), False, 'from collections import defaultdict, Counter\n'), ((5518, 5527), 'viewfinder.backend.base.dotdict.DotDict', 'DotDict', ([], {}), '()\n', (5525, 5527), False, 'from viewfinder.backend.base.dotdict import DotDict\n'), ((6502, 6533), 'os.path.join', 'os.path.join', (['base_path', 'marker'], {}), '(base_path, marker)\n', (6514, 6533), False, 'import os\n'), ((6586, 6671), 'tornado.gen.Task', 'gen.Task', (['store_utils.ListAllKeys', 'merged_store'], {'prefix': 'base_path', 'marker': 'marker'}), '(store_utils.ListAllKeys, merged_store, prefix=base_path, marker=marker\n )\n', (6594, 6671), False, 'from tornado import gen, options\n'), ((8114, 8152), 'viewfinder.backend.base.util.TimestampUTCToISO8601', 'util.TimestampUTCToISO8601', (['start_time'], {}), '(start_time)\n', (8140, 8152), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((8404, 8461), 'tornado.gen.Task', 'gen.Task', (['GetMergedLogsFileList', 'merged_store', 'start_date'], {}), '(GetMergedLogsFileList, merged_store, start_date)\n', (8412, 8461), False, 'from tornado import gen, options\n'), ((8482, 8525), 'tornado.gen.Task', 'gen.Task', (['ProcessFiles', 'merged_store', 'files'], {}), '(ProcessFiles, merged_store, files)\n', (8490, 8525), False, 'from tornado import gen, options\n'), ((10124, 10147), 'viewfinder.backend.base.main.InitAndRun', 'main.InitAndRun', (['_Start'], {}), '(_Start)\n', (10139, 10147), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((4053, 4069), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (4063, 4069), False, 'import json\n'), ((7196, 7260), 'tornado.gen.Task', 'gen.Task', (['job.FindLastSuccess'], {'with_payload_key': '"""stats.last_day"""'}), "(job.FindLastSuccess, with_payload_key='stats.last_day')\n", (7204, 7260), False, 'from tornado import gen, options\n'), ((7292, 7366), 'logging.info', 'logging.info', (['"""No previous successful scan found, rerun with --start_date"""'], {}), "('No previous successful scan found, rerun with --start_date')\n", (7304, 7366), False, 'import logging\n'), ((7453, 7484), 'viewfinder.backend.base.util.HoursSince', 'util.HoursSince', (['last_run_start'], {}), '(last_run_start)\n', (7468, 7484), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((8022, 8067), 'viewfinder.backend.base.util.ISO8601ToUTCTimestamp', 'util.ISO8601ToUTCTimestamp', (['last_day'], {'hour': '(12)'}), '(last_day, 
hour=12)\n', (8048, 8067), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((8664, 8769), 'tornado.gen.Task', 'gen.Task', (['logs_util.UpdateMetrics', 'client', 'day_stats'], {'dry_run': 'options.options.dry_run', 'hms_tuple': 'hms'}), '(logs_util.UpdateMetrics, client, day_stats, dry_run=options.\n options.dry_run, hms_tuple=hms)\n', (8672, 8769), False, 'from tornado import gen, options\n'), ((9151, 9176), 'tornado.gen.Task', 'gen.Task', (['job.AcquireLock'], {}), '(job.AcquireLock)\n', (9159, 9176), False, 'from tornado import gen, options\n'), ((9209, 9264), 'logging.warning', 'logging.warning', (['"""Failed to acquire job lock: exiting."""'], {}), "('Failed to acquire job lock: exiting.')\n", (9224, 9264), False, 'import logging\n'), ((9352, 9382), 'tornado.gen.Task', 'gen.Task', (['RunOnce', 'client', 'job'], {}), '(RunOnce, client, job)\n', (9360, 9382), False, 'from tornado import gen, options\n'), ((9455, 9469), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9467, 9469), False, 'import sys\n'), ((9534, 9595), 'logging.info', 'logging.info', (["('Registering failed run with message: %s' % msg)"], {}), "('Registering failed run with message: %s' % msg)\n", (9546, 9595), False, 'import logging\n'), ((9837, 9846), 'viewfinder.backend.base.dotdict.DotDict', 'DotDict', ([], {}), '()\n', (9844, 9846), False, 'from viewfinder.backend.base.dotdict import DotDict\n'), ((9886, 9951), 'logging.info', 'logging.info', (["('Registering successful run with stats: %r' % stats)"], {}), "('Registering successful run with stats: %r' % stats)\n", (9898, 9951), False, 'import logging\n'), ((10044, 10069), 'tornado.gen.Task', 'gen.Task', (['job.ReleaseLock'], {}), '(job.ReleaseLock)\n', (10052, 10069), False, 'from tornado import gen, options\n'), ((9488, 9528), 'traceback.format_exception', 'traceback.format_exception', (['typ', 'val', 'tb'], {}), '(typ, val, tb)\n', (9514, 9528), False, 'import traceback\n'), ((9606, 9668), 'tornado.gen.Task', 'gen.Task', (['job.RegisterRun', 'Job.STATUS_FAILURE'], {'failure_msg': 'msg'}), '(job.RegisterRun, Job.STATUS_FAILURE, failure_msg=msg)\n', (9614, 9668), False, 'from tornado import gen, options\n'), ((9964, 10022), 'tornado.gen.Task', 'gen.Task', (['job.RegisterRun', 'Job.STATUS_SUCCESS'], {'stats': 'stats'}), '(job.RegisterRun, Job.STATUS_SUCCESS, stats=stats)\n', (9972, 10022), False, 'from tornado import gen, options\n'), ((3167, 3239), 'viewfinder.backend.base.statistics.FormatStats', 'statistics.FormatStats', (['self._long_scan_speeds'], {'percentiles': '[90, 95, 99]'}), '(self._long_scan_speeds, percentiles=[90, 95, 99])\n', (3189, 3239), False, 'from viewfinder.backend.base import constants, main, statistics, util\n'), ((5268, 5297), 'tornado.gen.Task', 'gen.Task', (['merged_store.Get', 'f'], {}), '(merged_store.Get, f)\n', (5276, 5297), False, 'from tornado import gen, options\n'), ((5335, 5387), 'logging.error', 'logging.error', (["('Error fetching file %s: %r' % (f, e))"], {}), "('Error fetching file %s: %r' % (f, e))\n", (5348, 5387), False, 'import logging\n'), ((8300, 8330), 'time.localtime', 'time.localtime', (['last_run_start'], {}), '(last_run_start)\n', (8314, 8330), False, 'import time\n'), ((7648, 7678), 'time.localtime', 'time.localtime', (['last_run_start'], {}), '(last_run_start)\n', (7662, 7678), False, 'import time\n')]
|
"""
SPDX-License-Identifier: Apache-2.0
Copyright (C) 2021, Arm Limited and contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import numpy as np
from typing import List
import scipy.special as sc
import six
from six.moves import range
def error_and_exit(msg: str):
print('[ERROR]: ', msg)
sys.exit(1)
def calculate_output(output_arr: np.ndarray, confidence_interval: float = 0.95, non_parametric: bool = True):
"""
It calculates an averaged output across MCDO samples with confidence intervals for each outputted class.
Parameters
----------
output_arr: np.ndarray of shape (num_branches, num_elems, output_size)
An array with the outputs of each MCDO sample. Each MCDO sample is expected to NOT have softmax
applied (i.e., accepts logits).
confidence_interval: float
The confidence interval between [0, 1]. If non_parametric=False then only the values 0.8, 0.9, 0.95, and 0.99 are
currently available.
non_parametric: bool, default=True
If non_parametric=True, calculates non parametric confidence intervals, in which no assumptions are made about
the underlying distribution and intervals are calculated based only on the percentiles.
If non_parametric=False, intervals are calculated assuming samples are normally distributed, and only the
confidence intervals 0.8, 0.9, 0.95, and 0.99 can be used in this case. Be careful as the assumption of normality
can generate unreasonable numbers when num_elems is too small (e.g. values below zero when output is softmaxed).
Returns
-------
mean_preds : ndarray of shape (num_elems, output_size)
Averaged scores across sampled outputs
lower_lim : ndarray of shape (num_elems, output_size)
Lower limit of the confidence interval per class output
upper_lim : ndarray of shape (num_elems, output_size)
Upper limit of the confidence interval per class output
std_preds : ndarray of shape (num_elems, output_size)
Standard deviations across sampled outputs
"""
if confidence_interval < 0 or confidence_interval > 1:
error_and_exit('Confidence interval needs to be between 0 and 1.')
if output_arr.ndim != 3:
error_and_exit('output_arr does not have the expected dimension of [num_branches, num_elems, output_size].')
# Softmax the logits (last axis)
predictions = sc.softmax(output_arr, axis=-1)
    # Shape: (num_elems, output_size)
mean_preds = np.mean(predictions, axis=0)
std_preds = np.std(predictions, axis=0)
if non_parametric:
ci = confidence_interval
lower_lim = np.quantile(predictions, 0.5 - ci / 2, axis=0) # lower limit of the CI
upper_lim = np.quantile(predictions, 0.5 + ci / 2, axis=0) # upper limit of the CI
else:
        num_samples = predictions.shape[0]  # number of MCDO samples (the axis averaged over)
zscores = {0.8: 1.282, 0.9: 1.645, 0.95: 1.96, 0.99: 2.576}
if round(confidence_interval, 2) not in zscores.keys():
error_and_exit(
                f'Confidence interval not supported. Only the following are supported for parametric calculation: {zscores.keys()}')
if num_samples < 30:
print(
'Warning: calculating a parametric confidence interval with number of samples < 30 is not recommended.')
z = zscores[round(confidence_interval, 2)]
se = std_preds / np.sqrt(num_samples)
lower_lim = mean_preds - z * se # lower limit of the CI
upper_lim = mean_preds + z * se # upper limit of the CI
return mean_preds, lower_lim, upper_lim, std_preds
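# Illustrative sketch (an addition, not part of the original module): for logits of shape
# (num_branches, num_elems, output_size), e.g. 20 MCDO samples over 4 inputs and 10 classes,
#   logits = np.random.randn(20, 4, 10)
#   mean_p, lo, hi, std_p = calculate_output(logits, confidence_interval=0.95)
#   mean_p.shape                       # -> (4, 10); lo, hi and std_p have the same shape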
################################################################################
# Inference metrics #
################################################################################
def bin_centers_of_mass(probabilities, bin_edges):
probabilities = np.where(probabilities == 0, 1e-8, probabilities)
indices = np.digitize(probabilities, bin_edges, right=True)
return np.array([np.mean(probabilities[indices == i])
for i in range(1, len(bin_edges))])
def bin_predictions_and_accuracies(probabilities, ground_truth, bins=10):
"""A helper function which histograms a vector of probabilities into bins.
Args:
probabilities: A numpy vector of N probabilities assigned to each prediction
ground_truth: A numpy vector of N ground truth labels in {0,1}
bins: Number of equal width bins to bin predictions into in [0, 1], or an
array representing bin edges.
Returns:
bin_edges: Numpy vector of floats containing the edges of the bins
(including leftmost and rightmost).
accuracies: Numpy vector of floats for the average accuracy of the
predictions in each bin.
counts: Numpy vector of ints containing the number of examples per bin.
"""
_validate_probabilities(probabilities)
_check_rank_nonempty(rank=1,
probabilities=probabilities,
ground_truth=ground_truth)
if len(probabilities) != len(ground_truth):
raise ValueError(
        'Probabilities and ground truth must have the same number of elements.')
if [v for v in ground_truth if v not in [0., 1., True, False]]:
raise ValueError(
'Ground truth must contain binary labels {0,1} or {False, True}.')
if isinstance(bins, int):
num_bins = bins
else:
num_bins = bins.size - 1
# Ensure probabilities are never 0, since the bins in np.digitize are open on
# one side.
probabilities = np.where(probabilities == 0, 1e-8, probabilities)
counts, bin_edges = np.histogram(probabilities, bins=bins, range=[0., 1.])
indices = np.digitize(probabilities, bin_edges, right=True)
accuracies = np.array([np.mean(ground_truth[indices == i])
for i in range(1, num_bins + 1)])
return bin_edges, accuracies, counts
def expected_calibration_error(probabilities, ground_truth, bins=15):
"""Compute the expected calibration error of a set of preditions in [0, 1].
Args:
probabilities: A numpy vector of N probabilities assigned to each prediction
ground_truth: A numpy vector of N ground truth labels in {0,1, True, False}
bins: Number of equal width bins to bin predictions into in [0, 1], or
an array representing bin edges.
Returns:
Float: the expected calibration error.
"""
bin_edges, accuracies, counts = bin_predictions_and_accuracies(
probabilities, ground_truth, bins)
bin_centers = bin_centers_of_mass(probabilities, bin_edges)
num_examples = np.sum(counts)
ece = np.sum([(counts[i] / float(num_examples)) * np.sum(
np.abs(bin_centers[i] - accuracies[i]))
for i in range(bin_centers.size) if counts[i] > 0])
return ece
def _check_rank_nonempty(rank, **kwargs):
for key, array in six.iteritems(kwargs):
if len(array) <= 1 or array.ndim != rank:
raise ValueError(
'%s must be a rank-1 array of length > 1; actual shape is %s.' %
(key, array.shape))
def _validate_probabilities(probabilities, multiclass=False):
if np.max(probabilities) > 1. or np.min(probabilities) < 0.:
raise ValueError('All probabilities must be in [0,1].')
if multiclass and not np.allclose(1, np.sum(probabilities, axis=-1),
atol=1e-5):
raise ValueError(
'Multiclass probabilities must sum to 1 along the last dimension.')
def get_multiclass_predictions_and_correctness(probabilities, labels, top_k=1):
"""Returns predicted class, correctness boolean vector."""
_validate_probabilities(probabilities, multiclass=True)
_check_rank_nonempty(rank=1, labels=labels)
_check_rank_nonempty(rank=2, probabilities=probabilities)
if top_k == 1:
class_predictions = np.argmax(probabilities, -1)
top_k_probs = probabilities[np.arange(len(labels)), class_predictions]
is_correct = np.equal(class_predictions, labels)
else:
top_k_probs, is_correct = _filter_top_k(probabilities, labels, top_k)
return top_k_probs, is_correct
def expected_calibration_error_multiclass(probabilities, labels, bins=15,
top_k=1):
"""Computes expected calibration error from Guo et al. 2017.
For details, see https://arxiv.org/abs/1706.04599.
Note: If top_k is None, this only measures calibration of the argmax
prediction.
Args:
probabilities: Array of probabilities of shape [num_samples, num_classes].
labels: Integer array labels of shape [num_samples].
bins: Number of equal width bins to bin predictions into in [0, 1], or
an array representing bin edges.
top_k: Integer or None. If integer, use the top k predicted
probabilities in ECE calculation (can be informative for problems with
many classes and lower top-1 accuracy). If None, use all classes.
Returns:
float: Expected calibration error.
"""
top_k_probs, is_correct = get_multiclass_predictions_and_correctness(
probabilities, labels, top_k)
top_k_probs = top_k_probs.flatten()
is_correct = is_correct.flatten()
return expected_calibration_error(top_k_probs, is_correct, bins)
def metrics_from_stats(stats):
"""Compute metrics from a stats dictionary."""
labels, probs = stats['labels'], stats['probs']
# Reshape binary predictions to 2-class.
if len(probs.shape) == 1:
probs = np.stack([1-probs, probs], axis=-1)
assert len(probs.shape) == 2
predictions = np.argmax(probs, axis=-1)
accuracy = np.equal(labels, predictions)
label_probs = probs[np.arange(len(labels)), labels]
log_probs = np.maximum(-1e10, np.log(label_probs))
brier_scores = np.square(probs).sum(-1) - 2 * label_probs
ece = expected_calibration_error_multiclass(probs, labels)
return {'accuracy': accuracy.mean(0),
'brier_score': brier_scores.mean(0),
'log_prob': log_probs.mean(0),
'ece': ece}
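# A minimal usage sketch for metrics_from_stats above, assuming a stats dict
# with the 'labels'/'probs' keys it expects; a 1-D vector of class-1
# probabilities is enough because binary inputs are reshaped to two columns.
# The toy values and helper name are illustrative only.
def _demo_metrics_from_stats():
    toy_stats = {'labels': np.array([1, 0, 1, 1]),
                 'probs': np.array([0.8, 0.3, 0.6, 0.9])}
    return metrics_from_stats(toy_stats)  # dict with accuracy, brier_score, log_prob, ece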
|
[
"numpy.sum",
"numpy.abs",
"numpy.argmax",
"numpy.histogram",
"numpy.mean",
"six.iteritems",
"six.moves.range",
"numpy.std",
"numpy.equal",
"numpy.max",
"numpy.stack",
"numpy.square",
"numpy.min",
"scipy.special.softmax",
"numpy.digitize",
"sys.exit",
"numpy.quantile",
"numpy.log",
"numpy.where",
"numpy.sqrt"
] |
[((813, 824), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (821, 824), False, 'import sys\n'), ((2927, 2958), 'scipy.special.softmax', 'sc.softmax', (['output_arr'], {'axis': '(-1)'}), '(output_arr, axis=-1)\n', (2937, 2958), True, 'import scipy.special as sc\n'), ((2999, 3027), 'numpy.mean', 'np.mean', (['predictions'], {'axis': '(0)'}), '(predictions, axis=0)\n', (3006, 3027), True, 'import numpy as np\n'), ((3044, 3071), 'numpy.std', 'np.std', (['predictions'], {'axis': '(0)'}), '(predictions, axis=0)\n', (3050, 3071), True, 'import numpy as np\n'), ((4427, 4477), 'numpy.where', 'np.where', (['(probabilities == 0)', '(1e-08)', 'probabilities'], {}), '(probabilities == 0, 1e-08, probabilities)\n', (4435, 4477), True, 'import numpy as np\n'), ((4489, 4538), 'numpy.digitize', 'np.digitize', (['probabilities', 'bin_edges'], {'right': '(True)'}), '(probabilities, bin_edges, right=True)\n', (4500, 4538), True, 'import numpy as np\n'), ((6067, 6117), 'numpy.where', 'np.where', (['(probabilities == 0)', '(1e-08)', 'probabilities'], {}), '(probabilities == 0, 1e-08, probabilities)\n', (6075, 6117), True, 'import numpy as np\n'), ((6139, 6195), 'numpy.histogram', 'np.histogram', (['probabilities'], {'bins': 'bins', 'range': '[0.0, 1.0]'}), '(probabilities, bins=bins, range=[0.0, 1.0])\n', (6151, 6195), True, 'import numpy as np\n'), ((6206, 6255), 'numpy.digitize', 'np.digitize', (['probabilities', 'bin_edges'], {'right': '(True)'}), '(probabilities, bin_edges, right=True)\n', (6217, 6255), True, 'import numpy as np\n'), ((7094, 7108), 'numpy.sum', 'np.sum', (['counts'], {}), '(counts)\n', (7100, 7108), True, 'import numpy as np\n'), ((7360, 7381), 'six.iteritems', 'six.iteritems', (['kwargs'], {}), '(kwargs)\n', (7373, 7381), False, 'import six\n'), ((9990, 10015), 'numpy.argmax', 'np.argmax', (['probs'], {'axis': '(-1)'}), '(probs, axis=-1)\n', (9999, 10015), True, 'import numpy as np\n'), ((10029, 10058), 'numpy.equal', 'np.equal', (['labels', 'predictions'], {}), '(labels, predictions)\n', (10037, 10058), True, 'import numpy as np\n'), ((3149, 3195), 'numpy.quantile', 'np.quantile', (['predictions', '(0.5 - ci / 2)'], {'axis': '(0)'}), '(predictions, 0.5 - ci / 2, axis=0)\n', (3160, 3195), True, 'import numpy as np\n'), ((3241, 3287), 'numpy.quantile', 'np.quantile', (['predictions', '(0.5 + ci / 2)'], {'axis': '(0)'}), '(predictions, 0.5 + ci / 2, axis=0)\n', (3252, 3287), True, 'import numpy as np\n'), ((8309, 8337), 'numpy.argmax', 'np.argmax', (['probabilities', '(-1)'], {}), '(probabilities, -1)\n', (8318, 8337), True, 'import numpy as np\n'), ((8430, 8465), 'numpy.equal', 'np.equal', (['class_predictions', 'labels'], {}), '(class_predictions, labels)\n', (8438, 8465), True, 'import numpy as np\n'), ((9906, 9943), 'numpy.stack', 'np.stack', (['[1 - probs, probs]'], {'axis': '(-1)'}), '([1 - probs, probs], axis=-1)\n', (9914, 9943), True, 'import numpy as np\n'), ((10146, 10165), 'numpy.log', 'np.log', (['label_probs'], {}), '(label_probs)\n', (10152, 10165), True, 'import numpy as np\n'), ((3907, 3927), 'numpy.sqrt', 'np.sqrt', (['num_samples'], {}), '(num_samples)\n', (3914, 3927), True, 'import numpy as np\n'), ((4558, 4594), 'numpy.mean', 'np.mean', (['probabilities[indices == i]'], {}), '(probabilities[indices == i])\n', (4565, 4594), True, 'import numpy as np\n'), ((6281, 6316), 'numpy.mean', 'np.mean', (['ground_truth[indices == i]'], {}), '(ground_truth[indices == i])\n', (6288, 6316), True, 'import numpy as np\n'), ((7626, 7647), 'numpy.max', 'np.max', (['probabilities'], {}), 
'(probabilities)\n', (7632, 7647), True, 'import numpy as np\n'), ((7656, 7677), 'numpy.min', 'np.min', (['probabilities'], {}), '(probabilities)\n', (7662, 7677), True, 'import numpy as np\n'), ((6351, 6373), 'six.moves.range', 'range', (['(1)', '(num_bins + 1)'], {}), '(1, num_bins + 1)\n', (6356, 6373), False, 'from six.moves import range\n'), ((7241, 7264), 'six.moves.range', 'range', (['bin_centers.size'], {}), '(bin_centers.size)\n', (7246, 7264), False, 'from six.moves import range\n'), ((7783, 7813), 'numpy.sum', 'np.sum', (['probabilities'], {'axis': '(-1)'}), '(probabilities, axis=-1)\n', (7789, 7813), True, 'import numpy as np\n'), ((10184, 10200), 'numpy.square', 'np.square', (['probs'], {}), '(probs)\n', (10193, 10200), True, 'import numpy as np\n'), ((7176, 7214), 'numpy.abs', 'np.abs', (['(bin_centers[i] - accuracies[i])'], {}), '(bin_centers[i] - accuracies[i])\n', (7182, 7214), True, 'import numpy as np\n')]
|
import os
import sys
sys.path.insert(0, os.path.abspath('./src/'))
def make_test_rgb(saveflag=True, filename='./test/test_image.tif'):
"""
generate known RGB pixel array and output TIFF file
:param saveflag: flag to save output TIFF file, default True
:param filename: path and filename of saved TFF test file
:return: RGB pixel array
"""
from PIL import Image
import numpy as np
w, h = 10, 10
rgb = np.zeros((h, w, 3), dtype=np.uint8)
rgb[5, 5] = (100., 0., 0.)
rgb[1, 1] = (100., 0., 100.)
rgb[9, :] = 255.*np.ones((1, w, 3))
if saveflag:
img = Image.fromarray(rgb, 'RGB')
img.save(filename)
rgb = rgb.astype('float')
return rgb
def test_read_tiff():
from preprocess import read_tiff
import numpy as np
testfile = './test/test_image.tif'
# compare rgb values loaded from test file with known pixel values
expected = make_test_rgb(saveflag=True, filename=testfile)
actual = read_tiff(filename=testfile)
assert np.array_equal(expected, actual)
# remove test TIFF file when finished
os.remove(testfile)
def test_extract_hist():
from preprocess import extract_hist
import numpy as np
pix_array = np.zeros((2, 2))
pix_array[1, 1] = 255
pix_array[0, 1] = 10
pix_array[0, 0] = 10
pix_array[1, 0] = np.nan
actual = extract_hist(pix_array)
expected = np.zeros(256)
expected[255] = 1
expected[10] = 2
assert np.array_equal(actual, expected)
def test_nan_background():
from preprocess import nan_background
import numpy as np
rgb = np.zeros((2, 2, 3))
rgb[1, 1, 1] = 255
actual = nan_background(rgb)
expected = np.nan*np.empty((2, 2, 3))
expected[1, 1, :] = (0, 255, 0)
assert np.allclose(actual, expected, rtol=1e-05, atol=1e-08,
equal_nan=True)
def test_nan_upper_bound():
from preprocess import nan_upper_bound
import numpy as np
rgb = 254*np.ones((2, 2, 3))
rgb[1, 1, :] = (0, 255, 0)
actual = nan_upper_bound(rgb, (250, 250, 250))
expected = np.nan*np.empty((2, 2, 3))
expected[1, 1, :] = (0, 255, 0)
assert np.allclose(actual, expected, rtol=1e-05, atol=1e-08,
equal_nan=True)
def test_nan_yellow_pixels():
from preprocess import nan_yellow_pixels
import numpy as np
rgb = np.ones((10, 10, 3))
rgb[1, 1, :] = [255, 255, 40]
rgb = nan_yellow_pixels(rgb)
assert np.isnan(rgb[1, 1, 1])
assert np.isfinite(rgb[0, 0, 2])
def test_rgb_histogram():
from preprocess import rgb_histogram, rgb_preprocess
import numpy as np
rgb = make_test_rgb(saveflag=False)
rgb = rgb_preprocess(rgb, exclude_bg=True, upper_lim=(200, 200, 200))
rh, gh, bh = rgb_histogram(rgb, process=False)
expected_rh = np.zeros(256)
expected_rh[100] = 2
expected_gh = np.zeros(256)
expected_gh[0] = 2
expected_bh = np.zeros(256)
expected_bh[100] = 1
expected_bh[0] = 1
assert np.array_equal(rh, expected_rh)
assert np.array_equal(gh, expected_gh)
assert np.array_equal(bh, expected_bh)
rh, gh, bh = rgb_histogram(rgb, process=True)
expected_rh = np.zeros(256)
expected_rh[100] = 1
expected_gh = np.zeros(256)
expected_gh[0] = 1
expected_bh = np.zeros(256)
expected_bh[100] = 0.5
expected_bh[0] = 0.5
assert np.array_equal(rh, expected_rh)
assert np.array_equal(gh, expected_gh)
assert np.array_equal(bh, expected_bh)
|
[
"preprocess.nan_upper_bound",
"preprocess.nan_yellow_pixels",
"os.path.abspath",
"os.remove",
"numpy.empty",
"preprocess.read_tiff",
"preprocess.extract_hist",
"numpy.zeros",
"numpy.allclose",
"numpy.ones",
"numpy.isnan",
"numpy.isfinite",
"PIL.Image.fromarray",
"preprocess.rgb_histogram",
"numpy.array_equal",
"preprocess.nan_background",
"preprocess.rgb_preprocess"
] |
[((40, 65), 'os.path.abspath', 'os.path.abspath', (['"""./src/"""'], {}), "('./src/')\n", (55, 65), False, 'import os\n'), ((444, 479), 'numpy.zeros', 'np.zeros', (['(h, w, 3)'], {'dtype': 'np.uint8'}), '((h, w, 3), dtype=np.uint8)\n', (452, 479), True, 'import numpy as np\n'), ((989, 1017), 'preprocess.read_tiff', 'read_tiff', ([], {'filename': 'testfile'}), '(filename=testfile)\n', (998, 1017), False, 'from preprocess import read_tiff\n'), ((1030, 1062), 'numpy.array_equal', 'np.array_equal', (['expected', 'actual'], {}), '(expected, actual)\n', (1044, 1062), True, 'import numpy as np\n'), ((1110, 1129), 'os.remove', 'os.remove', (['testfile'], {}), '(testfile)\n', (1119, 1129), False, 'import os\n'), ((1237, 1253), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (1245, 1253), True, 'import numpy as np\n'), ((1372, 1395), 'preprocess.extract_hist', 'extract_hist', (['pix_array'], {}), '(pix_array)\n', (1384, 1395), False, 'from preprocess import extract_hist\n'), ((1412, 1425), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (1420, 1425), True, 'import numpy as np\n'), ((1481, 1513), 'numpy.array_equal', 'np.array_equal', (['actual', 'expected'], {}), '(actual, expected)\n', (1495, 1513), True, 'import numpy as np\n'), ((1619, 1638), 'numpy.zeros', 'np.zeros', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (1627, 1638), True, 'import numpy as np\n'), ((1676, 1695), 'preprocess.nan_background', 'nan_background', (['rgb'], {}), '(rgb)\n', (1690, 1695), False, 'from preprocess import nan_background\n'), ((1786, 1855), 'numpy.allclose', 'np.allclose', (['actual', 'expected'], {'rtol': '(1e-05)', 'atol': '(1e-08)', 'equal_nan': '(True)'}), '(actual, expected, rtol=1e-05, atol=1e-08, equal_nan=True)\n', (1797, 1855), True, 'import numpy as np\n'), ((2054, 2091), 'preprocess.nan_upper_bound', 'nan_upper_bound', (['rgb', '(250, 250, 250)'], {}), '(rgb, (250, 250, 250))\n', (2069, 2091), False, 'from preprocess import nan_upper_bound\n'), ((2182, 2251), 'numpy.allclose', 'np.allclose', (['actual', 'expected'], {'rtol': '(1e-05)', 'atol': '(1e-08)', 'equal_nan': '(True)'}), '(actual, expected, rtol=1e-05, atol=1e-08, equal_nan=True)\n', (2193, 2251), True, 'import numpy as np\n'), ((2386, 2406), 'numpy.ones', 'np.ones', (['(10, 10, 3)'], {}), '((10, 10, 3))\n', (2393, 2406), True, 'import numpy as np\n'), ((2452, 2474), 'preprocess.nan_yellow_pixels', 'nan_yellow_pixels', (['rgb'], {}), '(rgb)\n', (2469, 2474), False, 'from preprocess import nan_yellow_pixels\n'), ((2487, 2509), 'numpy.isnan', 'np.isnan', (['rgb[1, 1, 1]'], {}), '(rgb[1, 1, 1])\n', (2495, 2509), True, 'import numpy as np\n'), ((2521, 2546), 'numpy.isfinite', 'np.isfinite', (['rgb[0, 0, 2]'], {}), '(rgb[0, 0, 2])\n', (2532, 2546), True, 'import numpy as np\n'), ((2706, 2769), 'preprocess.rgb_preprocess', 'rgb_preprocess', (['rgb'], {'exclude_bg': '(True)', 'upper_lim': '(200, 200, 200)'}), '(rgb, exclude_bg=True, upper_lim=(200, 200, 200))\n', (2720, 2769), False, 'from preprocess import rgb_histogram, rgb_preprocess\n'), ((2787, 2820), 'preprocess.rgb_histogram', 'rgb_histogram', (['rgb'], {'process': '(False)'}), '(rgb, process=False)\n', (2800, 2820), False, 'from preprocess import rgb_histogram, rgb_preprocess\n'), ((2839, 2852), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (2847, 2852), True, 'import numpy as np\n'), ((2896, 2909), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (2904, 2909), True, 'import numpy as np\n'), ((2951, 2964), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (2959, 
2964), True, 'import numpy as np\n'), ((3025, 3056), 'numpy.array_equal', 'np.array_equal', (['rh', 'expected_rh'], {}), '(rh, expected_rh)\n', (3039, 3056), True, 'import numpy as np\n'), ((3068, 3099), 'numpy.array_equal', 'np.array_equal', (['gh', 'expected_gh'], {}), '(gh, expected_gh)\n', (3082, 3099), True, 'import numpy as np\n'), ((3111, 3142), 'numpy.array_equal', 'np.array_equal', (['bh', 'expected_bh'], {}), '(bh, expected_bh)\n', (3125, 3142), True, 'import numpy as np\n'), ((3161, 3193), 'preprocess.rgb_histogram', 'rgb_histogram', (['rgb'], {'process': '(True)'}), '(rgb, process=True)\n', (3174, 3193), False, 'from preprocess import rgb_histogram, rgb_preprocess\n'), ((3212, 3225), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (3220, 3225), True, 'import numpy as np\n'), ((3269, 3282), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (3277, 3282), True, 'import numpy as np\n'), ((3324, 3337), 'numpy.zeros', 'np.zeros', (['(256)'], {}), '(256)\n', (3332, 3337), True, 'import numpy as np\n'), ((3402, 3433), 'numpy.array_equal', 'np.array_equal', (['rh', 'expected_rh'], {}), '(rh, expected_rh)\n', (3416, 3433), True, 'import numpy as np\n'), ((3445, 3476), 'numpy.array_equal', 'np.array_equal', (['gh', 'expected_gh'], {}), '(gh, expected_gh)\n', (3459, 3476), True, 'import numpy as np\n'), ((3488, 3519), 'numpy.array_equal', 'np.array_equal', (['bh', 'expected_bh'], {}), '(bh, expected_bh)\n', (3502, 3519), True, 'import numpy as np\n'), ((565, 583), 'numpy.ones', 'np.ones', (['(1, w, 3)'], {}), '((1, w, 3))\n', (572, 583), True, 'import numpy as np\n'), ((616, 643), 'PIL.Image.fromarray', 'Image.fromarray', (['rgb', '"""RGB"""'], {}), "(rgb, 'RGB')\n", (631, 643), False, 'from PIL import Image\n'), ((1718, 1737), 'numpy.empty', 'np.empty', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (1726, 1737), True, 'import numpy as np\n'), ((1990, 2008), 'numpy.ones', 'np.ones', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (1997, 2008), True, 'import numpy as np\n'), ((2114, 2133), 'numpy.empty', 'np.empty', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (2122, 2133), True, 'import numpy as np\n')]
|
# Released under The MIT License (MIT)
# http://opensource.org/licenses/MIT
# Copyright (c) 2013-2015 SCoT Development Team
import unittest
import numpy as np
from numpy.testing import assert_allclose
from scot.varbase import VARBase as VAR
from scot.datatools import acm
epsilon = 1e-10
class TestVAR(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def generate_data(self, cc=((1, 0), (0, 1))):
var = VAR(2)
var.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
l = (1000, 100)
x = var.simulate(l, lambda: np.random.randn(2).dot(cc))
self.assertEqual(x.shape, (l[1], 2, l[0]))
return x, var
def test_abstract(self):
self.assertRaises(NotImplementedError, VAR(1).fit, [None])
self.assertRaises(NotImplementedError, VAR(1).optimize, [None])
def test_simulate(self):
noisefunc = lambda: [1, 1] # use deterministic function instead of noise
num_samples = 100
b = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
var = VAR(2)
var.coef = b
np.random.seed(42)
x = var.simulate(num_samples, noisefunc)
self.assertEqual(x.shape, (1, b.shape[0], num_samples))
# make sure we got expected values within reasonable accuracy
for n in range(10, num_samples):
self.assertTrue(np.all(
np.abs(x[0, :, n] - 1
- np.dot(x[0, :, n - 1], b[:, 0::2].T)
- np.dot(x[0, :, n - 2], b[:, 1::2].T)) < 1e-10))
def test_predict(self):
np.random.seed(777)
x, var = self.generate_data()
z = var.predict(x)
self.assertTrue(np.abs(np.var(x[:, :, 100:] - z[:, :, 100:]) - 1) < 0.005)
def test_yulewalker(self):
np.random.seed(7353)
x, var0 = self.generate_data([[1, 2], [3, 4]])
acms = [acm(x, l) for l in range(var0.p+1)]
var = VAR(var0.p)
var.from_yw(acms)
assert_allclose(var0.coef, var.coef, rtol=1e-2, atol=1e-2)
# that limit is rather generous, but we don't want tests to fail due to random variation
self.assertTrue(np.all(np.abs(var0.coef - var.coef) < 0.02))
self.assertTrue(np.all(np.abs(var0.rescov - var.rescov) < 0.02))
def test_whiteness(self):
np.random.seed(91)
r = np.random.randn(80, 15, 100) # gaussian white noise
r0 = r.copy()
var = VAR(0, n_jobs=-1)
var.residuals = r
p = var.test_whiteness(20, random_state=1)
self.assertTrue(np.all(r == r0)) # make sure we don't modify the input
self.assertGreater(p, 0.01) # test should be non-significant for white noise
r[:, 1, 3:] = r[:, 0, :-3] # create cross-correlation at lag 3
p = var.test_whiteness(20)
self.assertLessEqual(p, 0.01) # now test should be significant
def test_stable(self):
var = VAR(1)
# Stable AR model -- rule of thumb: sum(coefs) < 1
var.coef = np.asarray([[0.5, 0.3]])
self.assertTrue(var.is_stable())
# Unstable AR model -- rule of thumb: sum(coefs) > 1
var.coef = np.asarray([[0.5, 0.7]])
self.assertFalse(var.is_stable())
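# A hedged numpy-only sketch of the stability criterion that the
# "rule of thumb" comments above approximate: an AR(p) process is stable
# iff the spectral radius of its companion matrix is below 1. The helper
# name and coefficients are illustrative and do not use scot internals.
def _companion_spectral_radius(ar_coefs):
    p = len(ar_coefs)
    companion = np.zeros((p, p))
    companion[0, :] = ar_coefs
    companion[1:, :-1] = np.eye(p - 1)
    return np.max(np.abs(np.linalg.eigvals(companion)))
# _companion_spectral_radius([0.5, 0.3]) ~ 0.85  -> stable, as asserted above
# _companion_spectral_radius([0.5, 0.7]) ~ 1.12  -> unstable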
|
[
"numpy.random.seed",
"numpy.abs",
"numpy.random.randn",
"scot.varbase.VARBase",
"numpy.asarray",
"numpy.array",
"scot.datatools.acm",
"numpy.dot",
"numpy.testing.assert_allclose",
"numpy.var",
"numpy.all"
] |
[((465, 471), 'scot.varbase.VARBase', 'VAR', (['(2)'], {}), '(2)\n', (468, 471), True, 'from scot.varbase import VARBase as VAR\n'), ((491, 545), 'numpy.array', 'np.array', (['[[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]]'], {}), '([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])\n', (499, 545), True, 'import numpy as np\n'), ((1028, 1082), 'numpy.array', 'np.array', (['[[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]]'], {}), '([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])\n', (1036, 1082), True, 'import numpy as np\n'), ((1098, 1104), 'scot.varbase.VARBase', 'VAR', (['(2)'], {}), '(2)\n', (1101, 1104), True, 'from scot.varbase import VARBase as VAR\n'), ((1135, 1153), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (1149, 1153), True, 'import numpy as np\n'), ((1625, 1644), 'numpy.random.seed', 'np.random.seed', (['(777)'], {}), '(777)\n', (1639, 1644), True, 'import numpy as np\n'), ((1833, 1853), 'numpy.random.seed', 'np.random.seed', (['(7353)'], {}), '(7353)\n', (1847, 1853), True, 'import numpy as np\n'), ((1977, 1988), 'scot.varbase.VARBase', 'VAR', (['var0.p'], {}), '(var0.p)\n', (1980, 1988), True, 'from scot.varbase import VARBase as VAR\n'), ((2024, 2082), 'numpy.testing.assert_allclose', 'assert_allclose', (['var0.coef', 'var.coef'], {'rtol': '(0.01)', 'atol': '(0.01)'}), '(var0.coef, var.coef, rtol=0.01, atol=0.01)\n', (2039, 2082), False, 'from numpy.testing import assert_allclose\n'), ((2362, 2380), 'numpy.random.seed', 'np.random.seed', (['(91)'], {}), '(91)\n', (2376, 2380), True, 'import numpy as np\n'), ((2393, 2421), 'numpy.random.randn', 'np.random.randn', (['(80)', '(15)', '(100)'], {}), '(80, 15, 100)\n', (2408, 2421), True, 'import numpy as np\n'), ((2486, 2503), 'scot.varbase.VARBase', 'VAR', (['(0)'], {'n_jobs': '(-1)'}), '(0, n_jobs=-1)\n', (2489, 2503), True, 'from scot.varbase import VARBase as VAR\n'), ((2993, 2999), 'scot.varbase.VARBase', 'VAR', (['(1)'], {}), '(1)\n', (2996, 2999), True, 'from scot.varbase import VARBase as VAR\n'), ((3079, 3103), 'numpy.asarray', 'np.asarray', (['[[0.5, 0.3]]'], {}), '([[0.5, 0.3]])\n', (3089, 3103), True, 'import numpy as np\n'), ((3226, 3250), 'numpy.asarray', 'np.asarray', (['[[0.5, 0.7]]'], {}), '([[0.5, 0.7]])\n', (3236, 3250), True, 'import numpy as np\n'), ((1926, 1935), 'scot.datatools.acm', 'acm', (['x', 'l'], {}), '(x, l)\n', (1929, 1935), False, 'from scot.datatools import acm\n'), ((2607, 2622), 'numpy.all', 'np.all', (['(r == r0)'], {}), '(r == r0)\n', (2613, 2622), True, 'import numpy as np\n'), ((784, 790), 'scot.varbase.VARBase', 'VAR', (['(1)'], {}), '(1)\n', (787, 790), True, 'from scot.varbase import VARBase as VAR\n'), ((851, 857), 'scot.varbase.VARBase', 'VAR', (['(1)'], {}), '(1)\n', (854, 857), True, 'from scot.varbase import VARBase as VAR\n'), ((2212, 2240), 'numpy.abs', 'np.abs', (['(var0.coef - var.coef)'], {}), '(var0.coef - var.coef)\n', (2218, 2240), True, 'import numpy as np\n'), ((2281, 2313), 'numpy.abs', 'np.abs', (['(var0.rescov - var.rescov)'], {}), '(var0.rescov - var.rescov)\n', (2287, 2313), True, 'import numpy as np\n'), ((606, 624), 'numpy.random.randn', 'np.random.randn', (['(2)'], {}), '(2)\n', (621, 624), True, 'import numpy as np\n'), ((1741, 1778), 'numpy.var', 'np.var', (['(x[:, :, 100:] - z[:, :, 100:])'], {}), '(x[:, :, 100:] - z[:, :, 100:])\n', (1747, 1778), True, 'import numpy as np\n'), ((1540, 1576), 'numpy.dot', 'np.dot', (['x[0, :, n - 2]', 'b[:, 1::2].T'], {}), '(x[0, :, n - 2], b[:, 1::2].T)\n', (1546, 1576), True, 'import numpy as np\n'), ((1478, 1514), 
'numpy.dot', 'np.dot', (['x[0, :, n - 1]', 'b[:, 0::2].T'], {}), '(x[0, :, n - 1], b[:, 0::2].T)\n', (1484, 1514), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# =============================================================================
# Here we will be testing with mixtures of student-t, 1d
# =============================================================================
import sys
sys.path.insert(0,"../../src2")
import math
import functools
import torch
import numpy as np
from scipy.special import gamma
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from src.variational_boosting_bmc import VariationalBoosting
from src import vb_utils
from src import sampling
np.random.seed(100)
torch.manual_seed(100)
ninit = 5
training_interval = 1000
acquisition = ["uncertainty_sampling","prospective","mmlt","mmlt_prospective"][3]
data = np.load("ex3a_acq_%s_data/tracking.npz"%(acquisition),allow_pickle=True)
means = data["means"]
covs = data["covs"]
elbo = data["elbo"]
steps = data["steps"]
true_posterior = data["true_posterior"]
bmc_pred = data["bmc_pred"]
vb_posterior = data["vb_posterior"]
xplot = data["xplot"]
samples = data["samples"]
evaluations = data["evaluations"]
#iters = np.arange(len(dmeans))
fig1,ax1 = plt.subplots()
color = 'tab:blue'
ax1.plot(steps,np.log10(means),color=color,marker='.',linestyle=' ')
ax1.set_xlabel("iteration")
ax1.set_ylabel(r"$\log_{10}|\mu - \mu_0|_2$",color=color)
ax1.set_ylim(bottom=-2.5,top=1.0)
ax1.tick_params(axis='y', labelcolor=color)
ax2 = ax1.twinx()
color = 'tab:red'
ax2.plot(steps,np.log10(covs/25.0316),color=color,marker='x',linestyle=' ')
ax2.set_ylabel(r"$\log_{10} |\sigma^2 - \sigma^2_0|_2/|\sigma^2_0|$",color=color)
ax2.set_ylim(bottom=-4.0,top=1.0)
ax2.tick_params(axis='y', labelcolor=color)
fig1.savefig("../../tex/figs/dmcil1g_aq_%s.png"%acquisition)
fig3,ax3 = plt.subplots()
#inds_of_interest = [0,5,10,1]
inds_of_interest = [50]
for i,ind in enumerate(inds_of_interest):
alpha = 0.8*(i+1)/len(inds_of_interest)+0.2
ax3.plot(xplot,np.exp(vb_posterior[ind,:]),linestyle='--',alpha=1.0,
label="Variational distribution")
ax3.plot(xplot,np.exp(bmc_pred),label="GP prediction")
ax3.plot(xplot,np.exp(true_posterior),"b-",label="True distribution")
ax3.legend()
ax3.set_xlabel("x")
ax3.set_ylabel("f(x)")
fig3.savefig("../../tex/figs/convgraphil1g_aq_%s.png"%acquisition)
fig4,ax4 = plt.subplots()
ax4.plot(samples[:ninit],np.exp(evaluations)[:ninit],'ro',
samples[ninit:],np.exp(evaluations)[ninit:],'bx')
ax4.set_xlim([-20,20])
ax4.set_xlabel("x")
ax4.set_ylabel("f(x)")
ax4.legend(["Initial sampling","Active sampling"])
#fig4.savefig("../../tex/figs/explopattern1g_aq_%s.png"%acquisition)
|
[
"numpy.load",
"numpy.random.seed",
"torch.manual_seed",
"sys.path.insert",
"numpy.exp",
"numpy.log10",
"matplotlib.pyplot.subplots"
] |
[((253, 285), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../src2"""'], {}), "(0, '../../src2')\n", (268, 285), False, 'import sys\n'), ((564, 583), 'numpy.random.seed', 'np.random.seed', (['(100)'], {}), '(100)\n', (578, 583), True, 'import numpy as np\n'), ((584, 606), 'torch.manual_seed', 'torch.manual_seed', (['(100)'], {}), '(100)\n', (601, 606), False, 'import torch\n'), ((732, 805), 'numpy.load', 'np.load', (["('ex3a_acq_%s_data/tracking.npz' % acquisition)"], {'allow_pickle': '(True)'}), "('ex3a_acq_%s_data/tracking.npz' % acquisition, allow_pickle=True)\n", (739, 805), True, 'import numpy as np\n'), ((1119, 1133), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1131, 1133), True, 'import matplotlib.pyplot as plt\n'), ((1731, 1745), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1743, 1745), True, 'import matplotlib.pyplot as plt\n'), ((2277, 2291), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2289, 2291), True, 'import matplotlib.pyplot as plt\n'), ((1168, 1183), 'numpy.log10', 'np.log10', (['means'], {}), '(means)\n', (1176, 1183), True, 'import numpy as np\n'), ((1437, 1461), 'numpy.log10', 'np.log10', (['(covs / 25.0316)'], {}), '(covs / 25.0316)\n', (1445, 1461), True, 'import numpy as np\n'), ((2087, 2109), 'numpy.exp', 'np.exp', (['true_posterior'], {}), '(true_posterior)\n', (2093, 2109), True, 'import numpy as np\n'), ((1912, 1940), 'numpy.exp', 'np.exp', (['vb_posterior[ind, :]'], {}), '(vb_posterior[ind, :])\n', (1918, 1940), True, 'import numpy as np\n'), ((2032, 2048), 'numpy.exp', 'np.exp', (['bmc_pred'], {}), '(bmc_pred)\n', (2038, 2048), True, 'import numpy as np\n'), ((2317, 2336), 'numpy.exp', 'np.exp', (['evaluations'], {}), '(evaluations)\n', (2323, 2336), True, 'import numpy as np\n'), ((2376, 2395), 'numpy.exp', 'np.exp', (['evaluations'], {}), '(evaluations)\n', (2382, 2395), True, 'import numpy as np\n')]
|
# vim: expandtab:ts=4:sw=4
from __future__ import absolute_import
import numpy as np
from linear_assignment import min_marg_matching
import pdb
def get_unmatched(all_idx, matches, i, marginalization=None):
assigned = [match[i] for match in matches]
unmatched = set(all_idx) - set(assigned)
if marginalization is not None:
# from 1 for dummy node
in_gate_dets = np.nonzero(np.sum(
marginalization[:, 1:], axis=0))[0].tolist()
unmatched = [d for d in unmatched if d not in in_gate_dets]
return list(unmatched)
class Matcher:
def __init__(self, detections, marginalizations, confirmed_tracks,
matching_strategy,
assignment_threshold=None):
self.detections = detections
self.marginalizations = marginalizations
self.confirmed_tracks = confirmed_tracks
self.assignment_threshold = assignment_threshold
self.detection_indices = np.arange(len(detections))
self.matching_strategy = matching_strategy
def match(self):
self.get_matches()
self.get_unmatched_tracks()
self.get_unmatched_detections()
return self.matches, self.unmatched_tracks, self.unmatched_detections
def get_matches(self):
if self.matching_strategy == "max_and_threshold":
self.max_and_threshold_matching()
elif self.matching_strategy == "hungarian":
self.hungarian()
elif self.matching_strategy == "max_match":
self.max_match()
elif self.matching_strategy == "none":
self.matches = []
else:
raise Exception('Unrecognized matching strategy: {}'.
format(self.matching_strategy))
def get_unmatched_tracks(self):
self.unmatched_tracks = get_unmatched(self.confirmed_tracks,
self.matches, 0)
def get_unmatched_detections(self):
self.unmatched_detections = get_unmatched(self.detection_indices, self.matches, 1, self.marginalizations)
def max_match(self):
self.matches = []
if self.marginalizations.shape[0] == 0:
return
detection_map = {}
for i, track_idx in enumerate(self.confirmed_tracks):
marginalization = self.marginalizations[i,:]
detection_id = np.argmax(marginalization) - 1 # subtract one for dummy
if detection_id < 0:
continue
            threshold_p = marginalization[detection_id + 1]
            if threshold_p < self.assignment_threshold:
                continue
            if detection_id not in detection_map.keys():
                detection_map[detection_id] = track_idx
            else:
                cur_track = detection_map[detection_id]
                track_update = track_idx if self.marginalizations[track_idx, detection_id] > self.marginalizations[cur_track, detection_id] else cur_track
                detection_map[detection_id] = track_update
for detection in detection_map.keys():
self.matches.append((detection_map[detection], detection))
def max_and_threshold_matching(self):
self.matches = []
if self.marginalizations.shape[0] == 0:
return
for i, track_idx in enumerate(self.confirmed_tracks):
marginalization = self.marginalizations[i,:]
detection_id = np.argmax(marginalization) - 1 # subtract one for dummy
if detection_id < 0:
continue
threshold_p = marginalization[detection_id + 1]
if threshold_p < self.assignment_threshold:
continue
self.matches.append((track_idx, detection_id))
def hungarian(self):
self.matches, _, _ = min_marg_matching(self.marginalizations,
self.confirmed_tracks,
self.assignment_threshold)
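# A hedged toy example of the marginalization convention assumed above:
# row i is the assignment posterior for confirmed track i and column 0 is
# the dummy ("no detection") node, so detection j lives in column j + 1.
# The values, names, and expected output below are illustrative only.
def _demo_max_and_threshold():
    marginalizations = np.array([[0.1, 0.7, 0.2],    # track 0 -> detection 0
                                 [0.8, 0.1, 0.1]])   # track 1 -> dummy (unmatched)
    matcher = Matcher(detections=np.zeros(2), marginalizations=marginalizations,
                      confirmed_tracks=[0, 1], matching_strategy="max_and_threshold",
                      assignment_threshold=0.5)
    return matcher.match()  # roughly ([(0, 0)], [1], [])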
|
[
"linear_assignment.min_marg_matching",
"numpy.sum",
"numpy.argmax"
] |
[((3803, 3898), 'linear_assignment.min_marg_matching', 'min_marg_matching', (['self.marginalizations', 'self.confirmed_tracks', 'self.assignment_threshold'], {}), '(self.marginalizations, self.confirmed_tracks, self.\n assignment_threshold)\n', (3820, 3898), False, 'from linear_assignment import min_marg_matching\n'), ((2368, 2394), 'numpy.argmax', 'np.argmax', (['marginalization'], {}), '(marginalization)\n', (2377, 2394), True, 'import numpy as np\n'), ((3430, 3456), 'numpy.argmax', 'np.argmax', (['marginalization'], {}), '(marginalization)\n', (3439, 3456), True, 'import numpy as np\n'), ((402, 440), 'numpy.sum', 'np.sum', (['marginalization[:, 1:]'], {'axis': '(0)'}), '(marginalization[:, 1:], axis=0)\n', (408, 440), True, 'import numpy as np\n')]
|
import numpy as np
import torch
import scipy as sp
# integrated gradients
def integrated_gradients(inputs, model, target_label_idx, predict_and_gradients, steps=50, device=None, baseline=None,
path=None):
if device and "cpu" in inputs.device.type:
inputs = inputs.to(device)
if baseline is None:
baseline = torch.rand_like(inputs, device=device)
elif device and "cpu" in baseline.device.type:
baseline = baseline.to(device)
if path is None:
# scale inputs and compute gradients
scaled_inputs = [baseline + (float(i) / steps) * (inputs - baseline) for i in range(0, steps + 1)]
else:
scaled_inputs = [p.to(device) for p in path]
grads, _ = predict_and_gradients(scaled_inputs, model, target_label_idx, device)
grads = (grads[:-1] + grads[1:]) / 2.0
avg_grads = np.average(grads, axis=0)
avg_grads = np.transpose(avg_grads, (1, 2, 0))
integrated_grad = (inputs.cpu() - baseline.cpu()).squeeze(0).permute(1, 2, 0).numpy() * avg_grads
return integrated_grad, grads, baseline
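# A hedged, torch-free sketch of the Riemann/trapezoid approximation that
# integrated_gradients above performs: average gradients at points
# interpolated between baseline and input, then scale by (input - baseline).
# f(x) = sum(x**2) is an illustrative choice whose gradient 2*x is known
# analytically; all names below are assumptions for illustration only.
def _demo_riemann_integrated_gradients(steps=50):
    x = np.array([1.0, -2.0, 3.0])
    baseline = np.zeros_like(x)
    alphas = np.linspace(0.0, 1.0, steps + 1)
    grads = np.stack([2.0 * (baseline + a * (x - baseline)) for a in alphas])
    avg_grads = np.average((grads[:-1] + grads[1:]) / 2.0, axis=0)  # trapezoid rule, as above
    return (x - baseline) * avg_grads  # equals x**2 for this f and zero baseline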
# TODO: play around with other integration methods/paths
def integrate_gradients(inputs, model, target_label_idx, predict_and_gradients, steps=50, device=None, baseline=None,
path=None):
if baseline is None:
baseline = torch.rand_like(inputs)
def path_step(alpha):
diff = (inputs - baseline)
pimg = baseline + alpha * diff
grads, _ = predict_and_gradients(pimg, model, target_label_idx, device)
# scale inputs and compute gradients
scaled_inputs = [baseline + (float(i) / steps) * (inputs - baseline) for i in range(0, steps + 1)]
grads, _ = predict_and_gradients(scaled_inputs, model, target_label_idx, device)
# average along scaling path, with equal weighting => riemman approx as in paper
avg_grads = np.average(grads[:-1], axis=0)
# move channel from first to last for display
avg_grads = np.transpose(avg_grads, (1, 2, 0))
integrated_grad = (inputs - baseline).squeeze(0).permute(1, 2, 0).numpy() * avg_grads
return integrated_grad, grads, baseline
def random_baseline_integrated_gradients(inputs, model, target_label_idx, predict_and_gradients, steps,
num_random_trials, device, baselines=None, paths=None):
all_intgrads = []
trial_grads = []
if baselines is None:
bl = []
else:
bl = baselines
for i in range(num_random_trials):
if baselines is None:
b = None
else:
b = baselines[i]
if paths is None:
p = None
else:
p = paths[i]
integrated_grad, grads, baseline = integrated_gradients(inputs, model, target_label_idx, predict_and_gradients,
steps=steps, device=device, baseline=b, path=p)
all_intgrads.append(integrated_grad)
trial_grads.append(grads)
if baselines is None:
bl.append(baseline)
print('the trial number is: {}'.format(i))
avg_intgrads = np.average(np.array(all_intgrads), axis=0)
return avg_intgrads, trial_grads, bl
|
[
"numpy.array",
"numpy.transpose",
"torch.rand_like",
"numpy.average"
] |
[((867, 892), 'numpy.average', 'np.average', (['grads'], {'axis': '(0)'}), '(grads, axis=0)\n', (877, 892), True, 'import numpy as np\n'), ((909, 943), 'numpy.transpose', 'np.transpose', (['avg_grads', '(1, 2, 0)'], {}), '(avg_grads, (1, 2, 0))\n', (921, 943), True, 'import numpy as np\n'), ((1883, 1913), 'numpy.average', 'np.average', (['grads[:-1]'], {'axis': '(0)'}), '(grads[:-1], axis=0)\n', (1893, 1913), True, 'import numpy as np\n'), ((1980, 2014), 'numpy.transpose', 'np.transpose', (['avg_grads', '(1, 2, 0)'], {}), '(avg_grads, (1, 2, 0))\n', (1992, 2014), True, 'import numpy as np\n'), ((358, 396), 'torch.rand_like', 'torch.rand_like', (['inputs'], {'device': 'device'}), '(inputs, device=device)\n', (373, 396), False, 'import torch\n'), ((1347, 1370), 'torch.rand_like', 'torch.rand_like', (['inputs'], {}), '(inputs)\n', (1362, 1370), False, 'import torch\n'), ((3143, 3165), 'numpy.array', 'np.array', (['all_intgrads'], {}), '(all_intgrads)\n', (3151, 3165), True, 'import numpy as np\n')]
|
import pytest
import numpy as np
import numpy.testing as nptest
from ellipse import LsqEllipse
def make_dataset(center, width, height, phi, n_points):
"""Generate Elliptical data with noise"""
t = np.linspace(0, 2 * np.pi, n_points)
x = (center[0]
+ width * np.cos(t) * np.cos(phi)
- height * np.sin(t) * np.sin(phi))
y = (center[1]
+ width * np.cos(t) * np.sin(phi)
+ height * np.sin(t) * np.cos(phi))
return np.c_[x, y]
# phi needs to be < (1/4 * pi) and width != height or test is degenerate
@pytest.mark.parametrize('center', [[1, 1], [0, 1]])
@pytest.mark.parametrize('width', [.4, 10])
@pytest.mark.parametrize('height', [.2, 3])
@pytest.mark.parametrize('phi', [np.pi / 5, np.pi / 13])
def test_ellipse_fit(center, width, height, phi):
X = make_dataset(
center=center,
width=width,
height=height,
phi=phi,
n_points=10
)
elp = LsqEllipse()
elp.fit(X)
_center, _width, _height, _phi = elp.as_parameters()
nptest.assert_array_almost_equal(_center, center)
nptest.assert_almost_equal(_width, width)
nptest.assert_almost_equal(_height, height)
nptest.assert_almost_equal(_phi, phi)
def test_minimum_data_points():
X = make_dataset(
center=[0, 0],
width=1,
height=.5,
phi=0,
n_points=5
)
elp = LsqEllipse()
elp.fit(X)
_center, _width, _height, _phi = elp.as_parameters()
nptest.assert_array_almost_equal(_center, [0, 0])
nptest.assert_almost_equal(_width, 1)
nptest.assert_almost_equal(_height, .5)
nptest.assert_almost_equal(_phi, 0)
def test_less_than_minimum_data_points_raises_err():
X = make_dataset(
center=[0, 0],
width=1,
height=.5,
phi=0,
n_points=4
)
elp = LsqEllipse()
with pytest.raises(ValueError):
elp.fit(X)
@pytest.mark.parametrize('n_points', [5, 100])
def test_return_fit_returns_correct_ellipse(n_points):
X = make_dataset(
center=[0, 0],
width=1,
height=.5,
phi=0,
n_points=n_points
)
elp = LsqEllipse().fit(X)
x = elp.return_fit(n_points)
nptest.assert_array_almost_equal(x, X)
|
[
"numpy.testing.assert_almost_equal",
"ellipse.LsqEllipse",
"pytest.raises",
"numpy.sin",
"numpy.linspace",
"numpy.cos",
"pytest.mark.parametrize",
"numpy.testing.assert_array_almost_equal"
] |
[((560, 611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""center"""', '[[1, 1], [0, 1]]'], {}), "('center', [[1, 1], [0, 1]])\n", (583, 611), False, 'import pytest\n'), ((613, 656), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""width"""', '[0.4, 10]'], {}), "('width', [0.4, 10])\n", (636, 656), False, 'import pytest\n'), ((657, 700), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""height"""', '[0.2, 3]'], {}), "('height', [0.2, 3])\n", (680, 700), False, 'import pytest\n'), ((701, 756), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""phi"""', '[np.pi / 5, np.pi / 13]'], {}), "('phi', [np.pi / 5, np.pi / 13])\n", (724, 756), False, 'import pytest\n'), ((1913, 1958), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n_points"""', '[5, 100]'], {}), "('n_points', [5, 100])\n", (1936, 1958), False, 'import pytest\n'), ((209, 244), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 'n_points'], {}), '(0, 2 * np.pi, n_points)\n', (220, 244), True, 'import numpy as np\n'), ((949, 961), 'ellipse.LsqEllipse', 'LsqEllipse', ([], {}), '()\n', (959, 961), False, 'from ellipse import LsqEllipse\n'), ((1039, 1088), 'numpy.testing.assert_array_almost_equal', 'nptest.assert_array_almost_equal', (['_center', 'center'], {}), '(_center, center)\n', (1071, 1088), True, 'import numpy.testing as nptest\n'), ((1093, 1134), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_width', 'width'], {}), '(_width, width)\n', (1119, 1134), True, 'import numpy.testing as nptest\n'), ((1139, 1182), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_height', 'height'], {}), '(_height, height)\n', (1165, 1182), True, 'import numpy.testing as nptest\n'), ((1187, 1224), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_phi', 'phi'], {}), '(_phi, phi)\n', (1213, 1224), True, 'import numpy.testing as nptest\n'), ((1390, 1402), 'ellipse.LsqEllipse', 'LsqEllipse', ([], {}), '()\n', (1400, 1402), False, 'from ellipse import LsqEllipse\n'), ((1480, 1529), 'numpy.testing.assert_array_almost_equal', 'nptest.assert_array_almost_equal', (['_center', '[0, 0]'], {}), '(_center, [0, 0])\n', (1512, 1529), True, 'import numpy.testing as nptest\n'), ((1534, 1571), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_width', '(1)'], {}), '(_width, 1)\n', (1560, 1571), True, 'import numpy.testing as nptest\n'), ((1576, 1616), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_height', '(0.5)'], {}), '(_height, 0.5)\n', (1602, 1616), True, 'import numpy.testing as nptest\n'), ((1620, 1655), 'numpy.testing.assert_almost_equal', 'nptest.assert_almost_equal', (['_phi', '(0)'], {}), '(_phi, 0)\n', (1646, 1655), True, 'import numpy.testing as nptest\n'), ((1842, 1854), 'ellipse.LsqEllipse', 'LsqEllipse', ([], {}), '()\n', (1852, 1854), False, 'from ellipse import LsqEllipse\n'), ((2211, 2249), 'numpy.testing.assert_array_almost_equal', 'nptest.assert_array_almost_equal', (['x', 'X'], {}), '(x, X)\n', (2243, 2249), True, 'import numpy.testing as nptest\n'), ((1864, 1889), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1877, 1889), False, 'import pytest\n'), ((340, 351), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (346, 351), True, 'import numpy as np\n'), ((447, 458), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (453, 458), True, 'import numpy as np\n'), ((2153, 2165), 'ellipse.LsqEllipse', 'LsqEllipse', ([], {}), '()\n', (2163, 2165), False, 'from 
ellipse import LsqEllipse\n'), ((296, 307), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (302, 307), True, 'import numpy as np\n'), ((328, 337), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (334, 337), True, 'import numpy as np\n'), ((403, 414), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (409, 414), True, 'import numpy as np\n'), ((435, 444), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (441, 444), True, 'import numpy as np\n'), ((284, 293), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (290, 293), True, 'import numpy as np\n'), ((391, 400), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (397, 400), True, 'import numpy as np\n')]
|
from .utils import sigmoid, tanh
import numpy as np
class LSTM:
def __init__(self, input_shape, output_shape):
self.input_shape = input_shape
self.layer_1 = self.Unit(input_shape, output_shape)
    def forward(self, x=.34):
        # delegate to the single unit; its cell state and output gate carry the recurrence
        print(self.layer_1.unit(self.layer_1.cell_state, self.layer_1.output_gate, x))
class Unit:
def __init__(self, input_shape, output_shape):
self.forget_weights = np.random.random((input_shape, output_shape))
self.cell_state = np.random.random((input_shape))
self.output_gate = np.random.random(output_shape)
        def forward(self, x):
            # incomplete stub; the actual state update lives in unit() below
            pass
def unit(self, cell_state, output_gate, input_gate):
dot_prod = np.dot(self.forget_weights, np.dot(output_gate, input_gate))
f_t = sigmoid(dot_prod)
cell_state = cell_state * f_t
return cell_state
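# A hedged reference sketch of a full LSTM cell step (forget, input and
# output gates plus a candidate state); the partial Unit above only models
# the forget-gate product. Weight names and shapes are illustrative only,
# and sigmoid/tanh are the helpers imported at the top of this module.
def _demo_lstm_cell_step(x_t, h_prev, c_prev, Wf, Wi, Wc, Wo):
    z = np.concatenate([h_prev, x_t])       # concatenated recurrent input
    f_t = sigmoid(np.dot(Wf, z))            # forget gate
    i_t = sigmoid(np.dot(Wi, z))            # input gate
    c_hat = tanh(np.dot(Wc, z))              # candidate cell state
    c_t = f_t * c_prev + i_t * c_hat          # updated cell state
    o_t = sigmoid(np.dot(Wo, z))            # output gate
    h_t = o_t * tanh(c_t)                     # new hidden state
    return h_t, c_t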
class ClassicRNN:
def __init__(self, input_shape, output_shape, hidden_dim=10, bptt_truncate=5, learning_rate=0.0001,
min_clip_value=-1, max_clip_value=1):
self.hidden_dim = hidden_dim
self.bptt_truncate = bptt_truncate
self.learning_rate = learning_rate
self.min_clip_value = min_clip_value
self.max_clip_value = max_clip_value
self.input_shape = input_shape
self.input_weights = np.random.uniform(0, 1, (self.hidden_dim, self.input_shape))
self.hidden_weights = np.random.uniform(0, 1, (self.hidden_dim, self.hidden_dim))
self.output_weights = np.random.uniform(0, 1, (output_shape, self.hidden_dim))
self.output = np.zeros((self.hidden_dim, output_shape))
self.layer_1 = self.Unit(input_shape, output_shape)
def forward(self, x, y):
"""
        Taken from Goodfellow et al.'s forward propagation equations 10.8-10.10 in Chapter 10 (Section 10.2, RNNs), ignoring the biases
U - input_weights - input to hidden
W - hidden_weights - hidden to hidden
V - output_weights - hidden to output
:param x:
:param y:
:return:
"""
new_input = np.zeros(x.shape)
for i in range(self.input_shape):
new_input[i] = x[i]
# 10.8
input_prod = np.dot(self.input_weights, new_input)
previous_output_prod = np.dot(self.hidden_weights, self.output)
prod = input_prod + previous_output_prod
# 10.9
out = tanh(prod)
# 10.10
mulv = np.dot(self.output_weights, out)
self.output = out
loss = (y - mulv) ** 2 / 2
return loss, mulv
def forward_backward(self, x, y):
layers = []
dU = np.zeros(self.input_weights.shape)
dV = np.zeros(self.output_weights.shape)
dW = np.zeros(self.hidden_weights.shape)
dU_t = np.zeros(self.input_weights.shape)
dV_t = np.zeros(self.output_weights.shape)
dW_t = np.zeros(self.hidden_weights.shape)
dU_i = np.zeros(self.input_weights.shape)
dW_i = np.zeros(self.hidden_weights.shape)
for i in range(self.input_shape):
new_input = np.zeros(x.shape)
new_input[i] = x[i]
input_prod = np.dot(self.input_weights, new_input)
previous_output_prod = np.dot(self.hidden_weights, self.output)
prod = input_prod + previous_output_prod
out = sigmoid(prod)
mulv = np.dot(self.output_weights, out)
layers.append({'out': out, 'prev_out': self.output})
self.output = out
dmulv = (mulv - y)
# backward pass
for t in range(self.input_shape):
dV_t = np.dot(dmulv, np.transpose(layers[t]['out']))
dsv = np.dot(np.transpose(self.output_weights), dmulv)
ds = dsv
dadd = prod * (1 - prod) * ds
dmulw = dadd * np.ones_like(previous_output_prod)
dprev_s = np.dot(np.transpose(self.hidden_weights), dmulw)
for i in range(t - 1, max(-1, t - self.bptt_truncate - 1), -1):
ds = dsv + dprev_s
dadd = prod * (1 - prod) * ds
dmulw = dadd * np.ones_like(previous_output_prod)
dmulu = dadd * np.ones_like(input_prod)
dW_i = np.dot(self.hidden_weights, layers[t]['prev_out'])
dprev_s = np.dot(np.transpose(self.hidden_weights), dmulw)
new_input = np.zeros(x.shape)
new_input[t] = x[t]
dU_i = np.dot(self.input_weights, new_input)
dx = np.dot(np.transpose(self.input_weights), dmulu)
dU_t += dU_i
dW_t += dW_i
dV += dV_t
dU += dU_t
dW += dW_t
if dU.max() > self.max_clip_value:
dU[dU > self.max_clip_value] = self.max_clip_value
if dV.max() > self.max_clip_value:
dV[dV > self.max_clip_value] = self.max_clip_value
if dW.max() > self.max_clip_value:
dW[dW > self.max_clip_value] = self.max_clip_value
if dU.min() < self.min_clip_value:
dU[dU < self.min_clip_value] = self.min_clip_value
if dV.min() < self.min_clip_value:
dV[dV < self.min_clip_value] = self.min_clip_value
if dW.min() < self.min_clip_value:
dW[dW < self.min_clip_value] = self.min_clip_value
# update
self.input_weights -= self.learning_rate * dU
self.output_weights -= self.learning_rate * dV
self.hidden_weights -= self.learning_rate * dW
loss = (y - mulv) ** 2 / 2
return loss
class Unit:
def __init__(self, input_shape, output_shape):
self.forget_weights = np.random.random((input_shape, output_shape))
self.output_gate = np.random.random(output_shape)
def forward(self, x):
self.output_gate = self.unit(self.output_gate, x)
return self.output_gate
def unit(self, output_gate, input_gate):
dot_prod = np.dot(self.forget_weights, np.dot(output_gate, input_gate))
return tanh(dot_prod)
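# A hedged, minimal sketch of the recurrence ClassicRNN.forward implements
# (eqs. 10.8-10.10 with biases ignored): h_t = tanh(U x_t + W h_{t-1}),
# o_t = V h_t. Names and shapes are illustrative and independent of the
# classes above.
def _demo_vanilla_rnn_step(x_t, h_prev, U, W, V):
    h_t = tanh(np.dot(U, x_t) + np.dot(W, h_prev))   # eqs. 10.8-10.9
    o_t = np.dot(V, h_t)                            # eq. 10.10
    return h_t, o_t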
|
[
"numpy.random.uniform",
"numpy.ones_like",
"numpy.transpose",
"numpy.zeros",
"numpy.random.random",
"numpy.dot"
] |
[((1357, 1417), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(self.hidden_dim, self.input_shape)'], {}), '(0, 1, (self.hidden_dim, self.input_shape))\n', (1374, 1417), True, 'import numpy as np\n'), ((1448, 1507), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(self.hidden_dim, self.hidden_dim)'], {}), '(0, 1, (self.hidden_dim, self.hidden_dim))\n', (1465, 1507), True, 'import numpy as np\n'), ((1538, 1594), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(output_shape, self.hidden_dim)'], {}), '(0, 1, (output_shape, self.hidden_dim))\n', (1555, 1594), True, 'import numpy as np\n'), ((1617, 1658), 'numpy.zeros', 'np.zeros', (['(self.hidden_dim, output_shape)'], {}), '((self.hidden_dim, output_shape))\n', (1625, 1658), True, 'import numpy as np\n'), ((2103, 2120), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (2111, 2120), True, 'import numpy as np\n'), ((2689, 2723), 'numpy.zeros', 'np.zeros', (['self.input_weights.shape'], {}), '(self.input_weights.shape)\n', (2697, 2723), True, 'import numpy as np\n'), ((2737, 2772), 'numpy.zeros', 'np.zeros', (['self.output_weights.shape'], {}), '(self.output_weights.shape)\n', (2745, 2772), True, 'import numpy as np\n'), ((2786, 2821), 'numpy.zeros', 'np.zeros', (['self.hidden_weights.shape'], {}), '(self.hidden_weights.shape)\n', (2794, 2821), True, 'import numpy as np\n'), ((2838, 2872), 'numpy.zeros', 'np.zeros', (['self.input_weights.shape'], {}), '(self.input_weights.shape)\n', (2846, 2872), True, 'import numpy as np\n'), ((2888, 2923), 'numpy.zeros', 'np.zeros', (['self.output_weights.shape'], {}), '(self.output_weights.shape)\n', (2896, 2923), True, 'import numpy as np\n'), ((2939, 2974), 'numpy.zeros', 'np.zeros', (['self.hidden_weights.shape'], {}), '(self.hidden_weights.shape)\n', (2947, 2974), True, 'import numpy as np\n'), ((2991, 3025), 'numpy.zeros', 'np.zeros', (['self.input_weights.shape'], {}), '(self.input_weights.shape)\n', (2999, 3025), True, 'import numpy as np\n'), ((3041, 3076), 'numpy.zeros', 'np.zeros', (['self.hidden_weights.shape'], {}), '(self.hidden_weights.shape)\n', (3049, 3076), True, 'import numpy as np\n'), ((416, 461), 'numpy.random.random', 'np.random.random', (['(input_shape, output_shape)'], {}), '((input_shape, output_shape))\n', (432, 461), True, 'import numpy as np\n'), ((492, 521), 'numpy.random.random', 'np.random.random', (['input_shape'], {}), '(input_shape)\n', (508, 521), True, 'import numpy as np\n'), ((555, 585), 'numpy.random.random', 'np.random.random', (['output_shape'], {}), '(output_shape)\n', (571, 585), True, 'import numpy as np\n'), ((2239, 2276), 'numpy.dot', 'np.dot', (['self.input_weights', 'new_input'], {}), '(self.input_weights, new_input)\n', (2245, 2276), True, 'import numpy as np\n'), ((2312, 2352), 'numpy.dot', 'np.dot', (['self.hidden_weights', 'self.output'], {}), '(self.hidden_weights, self.output)\n', (2318, 2352), True, 'import numpy as np\n'), ((2493, 2525), 'numpy.dot', 'np.dot', (['self.output_weights', 'out'], {}), '(self.output_weights, out)\n', (2499, 2525), True, 'import numpy as np\n'), ((3143, 3160), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (3151, 3160), True, 'import numpy as np\n'), ((3218, 3255), 'numpy.dot', 'np.dot', (['self.input_weights', 'new_input'], {}), '(self.input_weights, new_input)\n', (3224, 3255), True, 'import numpy as np\n'), ((3291, 3331), 'numpy.dot', 'np.dot', (['self.hidden_weights', 'self.output'], {}), '(self.hidden_weights, self.output)\n', (3297, 3331), True, 'import 
numpy as np\n'), ((3436, 3468), 'numpy.dot', 'np.dot', (['self.output_weights', 'out'], {}), '(self.output_weights, out)\n', (3442, 3468), True, 'import numpy as np\n'), ((5813, 5858), 'numpy.random.random', 'np.random.random', (['(input_shape, output_shape)'], {}), '((input_shape, output_shape))\n', (5829, 5858), True, 'import numpy as np\n'), ((5890, 5920), 'numpy.random.random', 'np.random.random', (['output_shape'], {}), '(output_shape)\n', (5906, 5920), True, 'import numpy as np\n'), ((756, 787), 'numpy.dot', 'np.dot', (['output_gate', 'input_gate'], {}), '(output_gate, input_gate)\n', (762, 787), True, 'import numpy as np\n'), ((3692, 3722), 'numpy.transpose', 'np.transpose', (["layers[t]['out']"], {}), "(layers[t]['out'])\n", (3704, 3722), True, 'import numpy as np\n'), ((3749, 3782), 'numpy.transpose', 'np.transpose', (['self.output_weights'], {}), '(self.output_weights)\n', (3761, 3782), True, 'import numpy as np\n'), ((3883, 3917), 'numpy.ones_like', 'np.ones_like', (['previous_output_prod'], {}), '(previous_output_prod)\n', (3895, 3917), True, 'import numpy as np\n'), ((3948, 3981), 'numpy.transpose', 'np.transpose', (['self.hidden_weights'], {}), '(self.hidden_weights)\n', (3960, 3981), True, 'import numpy as np\n'), ((4295, 4345), 'numpy.dot', 'np.dot', (['self.hidden_weights', "layers[t]['prev_out']"], {}), "(self.hidden_weights, layers[t]['prev_out'])\n", (4301, 4345), True, 'import numpy as np\n'), ((4450, 4467), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (4458, 4467), True, 'import numpy as np\n'), ((4527, 4564), 'numpy.dot', 'np.dot', (['self.input_weights', 'new_input'], {}), '(self.input_weights, new_input)\n', (4533, 4564), True, 'import numpy as np\n'), ((6151, 6182), 'numpy.dot', 'np.dot', (['output_gate', 'input_gate'], {}), '(output_gate, input_gate)\n', (6157, 6182), True, 'import numpy as np\n'), ((4180, 4214), 'numpy.ones_like', 'np.ones_like', (['previous_output_prod'], {}), '(previous_output_prod)\n', (4192, 4214), True, 'import numpy as np\n'), ((4246, 4270), 'numpy.ones_like', 'np.ones_like', (['input_prod'], {}), '(input_prod)\n', (4258, 4270), True, 'import numpy as np\n'), ((4379, 4412), 'numpy.transpose', 'np.transpose', (['self.hidden_weights'], {}), '(self.hidden_weights)\n', (4391, 4412), True, 'import numpy as np\n'), ((4593, 4625), 'numpy.transpose', 'np.transpose', (['self.input_weights'], {}), '(self.input_weights)\n', (4605, 4625), True, 'import numpy as np\n')]
|
import os
import sys
import wget
import numpy as np
from numpy import unique
from functools import reduce  # needed by Dataset.__str__ below
from Bio.SeqIO import parse as parse_fasta
from .geneontology import *
from .consts import *
cafa3_targets_url = 'http://biofunctionprediction.org/cafa-targets/CAFA3_targets.tgz'
cafa3_train_url = 'http://biofunctionprediction.org/cafa-targets/CAFA3_training_data.tgz'
cafa2_data_url = 'https://ndownloader.figshare.com/files/3658395'
cafa2_targets_url = 'http://biofunctionprediction.org/cafa-targets/CAFA-2013-targets.tgz'

verbose = True


def set_verbose(val):
    global verbose
    verbose = val


# Stream a file in fixed-size chunks so separator counting stays memory-bounded.
def blocks(files, size=8192*1024):
    while True:
        buffer = files.read(size)
        if not buffer:
            break
        yield buffer


def count_lines(fpath, sep=bytes('\n', 'utf8')):
    with open(fpath, "rb") as f:
        return sum(bl.count(sep) for bl in blocks(f))


# Base class: iterates a sequence source and maps sequence ids to sequences.
class SequenceLoader(object):
    def __init__(self, src_sequence, num_sequences):
        self.sequence_source = src_sequence
        self.num_sequences = num_sequences

    def load(self):
        n = self.num_sequences
        seq_id2seq = dict()
        for i, seq in enumerate(self.sequence_source):
            if verbose:
                sys.stdout.write("\r{0:.0f}%".format(100.0 * i/n))
            seq_id, seq_seq = self.parse_sequence(seq)
            seq_id2seq[seq_id] = seq_seq
        if verbose:
            print("\nFinished loading %s sequences!" % len(seq_id2seq))
        return seq_id2seq

    def __iter__(self):
        for _, seq in enumerate(self.sequence_source):
            seq_id, seq_seq = self.parse_sequence(seq)
            if seq_seq is None or seq_id is None:
                continue
            yield seq_id, seq_seq

    def parse_sequence(self, seq):
        return None, None


class FastaFileLoader(SequenceLoader):
    def __init__(self, src_fasta, num_seqs):
        super(FastaFileLoader, self).__init__(src_fasta, num_seqs)

    def parse_sequence(self, seq):
        return seq.id, seq.seq


class UniprotCollectionLoader(SequenceLoader):
    def __init__(self, src_sequence, num_sequences):
        super(UniprotCollectionLoader, self).__init__(src_sequence, num_sequences)

    def parse_sequence(self, doc):
        return doc["_id"], doc["sequence"]


class PssmCollectionLoader(SequenceLoader):
    def __init__(self, src_sequence, num_sequences):
        super(PssmCollectionLoader, self).__init__(src_sequence, num_sequences)

    def parse_sequence(self, doc):
        if "seq" in doc and "pssm" in doc and "alignment" in doc:
            return doc["_id"], (doc["seq"], doc["pssm"], doc["alignment"])
        else:
            return None, None


# Base class: builds forward and reverse id->id mappings from a source.
class MappingLoader(object):
    def __init__(self, src_mapping, num_mapping):
        self.mapping_source = src_mapping
        self.mapping_count = num_mapping

    def load(self):
        n = self.mapping_count
        direct_map, reverse_map = dict(), dict()
        for i, item in enumerate(self.mapping_source):
            if verbose:
                sys.stdout.write("\r{0:.0f}%".format(100.0 * i/n))
            id1, id2 = self.parse_mapping(item)
            if (not id1) and (not id2):
                continue
            try:
                if id1 in direct_map:
                    direct_map[id1].add(id2)
                else:
                    direct_map[id1] = {id2}
                if id2 in reverse_map:
                    reverse_map[id2].add(id1)
                else:
                    reverse_map[id2] = {id1}
            except TypeError:
                pass
        if verbose:
            m = sum(map(len, direct_map.values()))
            print("\nFinished loading %s mappings!" % m)
        return direct_map, reverse_map

    def parse_mapping(self, entry):
        return None, None


class MappingFileLoader(MappingLoader):
    def __init__(self, file_src, line_num):
        super(MappingFileLoader, self).__init__(file_src, line_num)

    def parse_mapping(self, line):
        s_line = line.strip().split()
        if len(s_line) != 2:
            return None, None
        else:
            return s_line


class GoAnnotationLoader(MappingLoader):
    def __init__(self, src_annotations, num_annotations, aspect=GoAspect()):
        super(GoAnnotationLoader, self)\
            .__init__(src_annotations, num_annotations)
        self.aspect = aspect

    def parse_mapping(self, entry):
        return None, None


class GoAnnotationFileLoader(GoAnnotationLoader):
    def __init__(self, annotation_file_io, num_lines, aspect):
        super(GoAnnotationFileLoader, self).__init__(annotation_file_io, num_lines, aspect)

    def parse_mapping(self, line):
        seq_id, go_id, go_asp = line.strip().split('\t')
        if go_asp == self.aspect:
            return seq_id, go_id
        else:
            return None, None


class GoAnnotationCollectionLoader(GoAnnotationLoader):
    def __init__(self, annotation_cursor, annotation_count, aspect):
        super(GoAnnotationCollectionLoader, self).__init__(annotation_cursor, annotation_count, aspect)

    def parse_mapping(self, doc):
        seq_id, go_id, go_asp = doc["DB_Object_ID"], doc["GO_ID"], doc["Aspect"]
        if go_asp == self.aspect:
            return seq_id, go_id
        else:
            return None, None


class Record(object):
    pass


# Embeds an amino-acid sequence as a (sequence_length x embedding_dim) array.
class Seq2Vec(object):
    def __init__(self, model):
        self._w2v = model

    def __getitem__(self, seq):
        return np.array([self._w2v[aa] for aa in seq], dtype=np.float64)


class Identity(object):
    def __call__(self, x):
        return x


# Holds (uid, sequence, label-set) records together with an ontology and an embedder.
class Dataset(object):
    def __init__(self, uid2seq, uid2lbl, ontology,
                 embedder=Seq2Vec(AA.aa2index),
                 transform=Identity()):
        self._emb = embedder
        self.onto = ontology
        self.transform = transform
        self.records = []
        self.do_init(uid2seq, uid2lbl)
        self.augmented = False

    def do_init(self, uid2seq, uid2lbl):
        records = self.records = []
        keys = uid2lbl.keys()
        for uid in keys:
            record = Record()
            if uid not in uid2seq:
                continue
            record.uid = uid
            record.lbl = uid2lbl[uid]
            record.seq = uid2seq[uid]
            records.append(record)

    def augment(self, max_length=None):
        if self.augmented:
            return
        n, m = len(self), 0
        onto = self.onto
        for i, record in enumerate(self.records):
            if verbose:
                sys.stdout.write("\r{0:.0f}%".format(100.0 * i/n))
            record.lbl = onto.propagate(record.lbl, max_length)
        self.augmented = True

    def __str__(self):
        num_anno = sum(map(lambda record: len(record.lbl), self.records))
        num_go = len(reduce(lambda x, y: x | y, map(lambda r: set(r.lbl), self.records), set()))
        num_seq = len(self)
        s = '\n#Annotations\t%d\n#GO-Terms\t%d\n#Sequences\t%d' % (num_anno, num_go, num_seq)
        return s

    @staticmethod
    def to_dictionaries(records):
        uid2seq = {record.uid: record.seq for record in records}
        uid2lbl = {record.uid: record.lbl for record in records}
        return uid2seq, uid2lbl

    def update(self, other):
        uid2seq, uid2lbl = Dataset.to_dictionaries(self.records)
        for record in other.records:
            if record.uid in uid2lbl:
                uid2lbl[record.uid] |= record.lbl
            else:
                uid2lbl[record.uid] = record.lbl
        uid2seq.update(Dataset.to_dictionaries(other.records)[0])
        self.do_init(uid2seq, uid2lbl)
        return self

    def split(self, ratio=0.2):  # split and make sure distrib. of length is the same
        data = np.array(sorted(self.records, key=lambda r: len(r.seq)))
        n, onto = len(data), self.onto
        train_indx = np.array([bool(i % round(1/ratio)) for i in range(n)])
        test_indx = np.invert(train_indx)
        train_records, test_records = data[train_indx], data[test_indx]
        train_uid2sec, train_uid2lbl = Dataset.to_dictionaries(train_records)
        test_uid2sec, test_uid2lbl = Dataset.to_dictionaries(test_records)
        return Dataset(train_uid2sec, train_uid2lbl, onto, self._emb,
                       transform=self.transform),\
               Dataset(test_uid2sec, test_uid2lbl, onto, self._emb,
                       transform=self.transform)

    @property
    def labels(self):
        return list(record.lbl for record in self.records)

    def __len__(self):
        return len(self.records)

    def __getitem__(self, i):
        emb, onto, fn, r = self._emb, self.onto, self.transform, self.records[i]
        return fn(emb[r.seq]), np.array([onto[go] for go in r.lbl])

    def __iter__(self):
        for i in range(len(self)):
            yield self[i]


# Batches records (longest sequences first) into fixed-size padded arrays.
class DataLoader(object):
    def __init__(self, dataset, batch_size):
        self.batch_size = batch_size
        self.dataset = dataset

    def __iter__(self):
        dataset = self.dataset
        batch_size = self.batch_size
        dataset.records.sort(key=lambda r: -len(r.seq))
        M = len(dataset)
        seq, lbl = dataset[0]
        N = lbl.shape[1]
        B = min(M, batch_size)
        T, D = seq.shape
        batch_lbl = np.zeros((B, N))
        batch_seq = np.zeros((B, 1, D, T))
        i = 0
        while i < M:
            j = 0
            while j < B:
                seq, lbl = dataset[i + j]
                B = min(M - i, batch_size)
                L, D = seq.shape
                batch_seq[j, :, :, :L] = seq.reshape((D, L))
                batch_lbl[j, :] = lbl.reshape((N,))
                j += 1
            i += j
            yield batch_seq[:B, :, :, :T], batch_lbl[:B, :]


def load_training_data_from_collections(annot_collection, seq_collection, aspect,
                                        from_date=None, to_date=None, exp=True, names=None):
    query = {"DB": "UniProtKB"}
    if from_date and to_date:
        query["Date"] = {"$gte": from_date, "$lte": to_date}
    elif to_date:
        query["Date"] = {"$lte": to_date}
    elif from_date:
        query["Date"] = {"$gte": from_date}
    if exp:
        query["Evidence"] = {"$in": exp_codes}
    if names:
        query["DB_Object_Symbol"] = {"$in": names}
    annot_src = annot_collection.find(query)
    annot_num = annot_collection.count(query)
    seq_id2go_id, go_id2seq_id = \
        GoAnnotationCollectionLoader(annot_src, annot_num, aspect).load()
    query = {"_id": {"$in": unique(list(seq_id2go_id.keys())).tolist()}}
    sequence_src = seq_collection.find(query)
    sequence_num = seq_collection.count(query)
    seq_id2seq = UniprotCollectionLoader(sequence_src, sequence_num).load()
    return seq_id2seq, seq_id2go_id, go_id2seq_id


def filter_labels_by(filter_func, direct_dict, reverse_dict):
    labels_to_be_deleted = set()
    for uid in reverse_dict.keys():
        if filter_func(reverse_dict[uid]):
            labels_to_be_deleted.add(uid)
    sequences_to_be_deleted = set()
    for uid in direct_dict:
        direct_dict[uid] -= labels_to_be_deleted
        if len(direct_dict[uid]) == 0:
            sequences_to_be_deleted.add(uid)
    for uid in sequences_to_be_deleted:
        del direct_dict[uid]


def filter_sequences_by(filter_func, seq_dict, lbl_dict):
    uids_to_be_deleted = set()
    for uid in seq_dict:
        if filter_func(seq_dict[uid]):
            uids_to_be_deleted.add(uid)
    for uid in uids_to_be_deleted:
        if uid in lbl_dict:
            del lbl_dict[uid]
        del seq_dict[uid]


def load_training_data_from_files(annots_tsv, fasta_fname, aspect):
    annot_src = open(annots_tsv, 'r')
    num_annot = count_lines(annots_tsv, sep=bytes('\n', 'utf8'))
    seq_id2go_id, go_id2seq_id = \
        GoAnnotationFileLoader(annot_src, num_annot, aspect).load()
    num_seq = count_lines(fasta_fname, sep=bytes('>', 'utf8'))
    fasta_src = parse_fasta(open(fasta_fname, 'r'), 'fasta')
    seq_id2seq = FastaFileLoader(fasta_src, num_seq).load()
    return seq_id2seq, seq_id2go_id, go_id2seq_id


def load_cafa3_targets(targets_dir, mapping_dir):
    trg_id2seq, trg_id2seq_id, seq_id2trg_id = dict(), dict(), dict()
    for fname in os.listdir(targets_dir):
        print("\nLoading: %s" % fname)
        fpath = "%s/%s" % (targets_dir, fname)
        num_seq = count_lines(fpath, sep=bytes('>', 'utf8'))
        fasta_src = parse_fasta(open(fpath, 'r'), 'fasta')
        trg_id2seq.update(FastaFileLoader(fasta_src, num_seq).load())
    for fname in os.listdir(mapping_dir):
        print("\nLoading: %s" % fname)
        fpath = "%s/%s" % (mapping_dir, fname)
        num_mapping = count_lines(fpath, sep=bytes('\n', 'utf8'))
        src_mapping = open(fpath, 'r')
        d1, d2 = MappingFileLoader(src_mapping, num_mapping).load()
        trg_id2seq_id.update(d1)
        seq_id2trg_id.update(d2)
    return trg_id2seq, trg_id2seq_id, seq_id2trg_id


def unzip(src, trg):
    if ".zip" in src:
        res = os.system('unzip %s -d %s' % (src, trg))
        assert res == 0
    elif ".tgz" in src:
        res = os.system('tar -xvzf %s -C %s' % (src, trg))
        assert res == 0
    else:
        res = os.system('unzip %s -d %s' % (src, trg))
        if res != 0: print("failed to decompress")


def wget_and_unzip(sub_dir, rel_dir, url):
    print("Downloading %s" % sub_dir)
    fname = wget.download(url, out=rel_dir)
    unzip(fname, rel_dir)
    os.remove(fname)


def load_data(db, asp='F', codes=exp_codes, limit=None):
    q = {'Evidence': {'$in': codes}, 'DB': 'UniProtKB'}
    c = limit if limit else db.goa_uniprot.count(q)
    s = db.goa_uniprot.find(q)
    if limit: s = s.limit(limit)
    seqid2goid, goid2seqid = GoAnnotationCollectionLoader(s, c, asp).load()
    query = {"_id": {"$in": unique(list(seqid2goid.keys())).tolist()}}
    num_seq = db.uniprot.count(query)
    src_seq = db.uniprot.find(query)
    seqid2seq = UniprotCollectionLoader(src_seq, num_seq).load()
    onto = get_ontology(asp)
    return Dataset(seqid2seq, seqid2goid, onto), goid2seqid


# Builds CAFA3 train/validation/test splits, downloading the data when missing.
def load_cafa3(db, data_dir, asp, aa_emb=AA.aa2index, seqs_filter=None, lbls_filter=None, trans=None):
    aspect = GoAspect(asp)
    seq2vec = Seq2Vec(aa_emb)
    cafa3_train_dir = '%s/CAFA3_training_data' % data_dir
    if not os.path.exists(cafa3_train_dir):
        wget_and_unzip('CAFA3_training_data', data_dir, cafa3_train_url)
    cafa3_go_tsv = '%s/uniprot_sprot_exp.txt' % cafa3_train_dir
    cafa3_train_fasta = '%s/uniprot_sprot_exp.fasta' % cafa3_train_dir
    seq_id2seq, seq_id2go_id, go_id2seq_id = \
        load_training_data_from_files(cafa3_go_tsv, cafa3_train_fasta, asp)
    if seqs_filter:
        filter_sequences_by(seqs_filter, seq_id2seq, seq_id2go_id)
    if lbls_filter:
        filter_labels_by(lbls_filter, seq_id2go_id, go_id2seq_id)
    train_set = Dataset(seq_id2seq, seq_id2go_id, seq2vec, transform=trans)
    cafa3_targets_dir = '%s/Target files' % data_dir
    cafa3_mapping_dir = '%s/Mapping files' % data_dir
    if not os.path.exists(cafa3_targets_dir) or not os.path.exists(cafa3_mapping_dir):
        wget_and_unzip('CAFA3_targets', data_dir, cafa3_targets_url)
    annots_fname = 'leafonly_%s_unique.txt' % aspect
    annots_fpath = '%s/CAFA3_benchmark20170605/groundtruth/%s' % (data_dir, annots_fname)
    trg_id2seq, _, _ = load_cafa3_targets(cafa3_targets_dir, cafa3_mapping_dir)
    num_mapping = count_lines(annots_fpath, sep=bytes('\n', 'utf8'))
    src_mapping = open(annots_fpath, 'r')
    trg_id2go_id, go_id2trg_id = MappingFileLoader(src_mapping, num_mapping).load()
    if seqs_filter:
        filter_sequences_by(seqs_filter, trg_id2seq, trg_id2go_id)
    if lbls_filter:
        filter_labels_by(lbls_filter, trg_id2go_id, go_id2trg_id)
    test_set = Dataset(trg_id2seq, trg_id2go_id, seq2vec, transform=trans)
    seq_id2seq, seq_id2go_id, go_id2seq_id = \
        load_training_data_from_collections(db.goa_uniprot, db.uniprot, asp,
                                            cafa3_cutoff, today_cutoff)
    if seqs_filter:
        filter_sequences_by(seqs_filter, seq_id2seq, seq_id2go_id)
    if lbls_filter:
        filter_labels_by(lbls_filter, seq_id2go_id, go_id2seq_id)
    valid_set = Dataset(seq_id2seq, seq_id2go_id, seq2vec, transform=trans)
    return train_set, valid_set, test_set


def load_cafa2(db, data_dir, aa_emb, seqs_filter=None, lbls_filter=None, transform=None):
    sub_dir = cafa2_targets_dir = 'CAFA-2013-targets'
    if not os.path.exists('%s/%s' % (data_dir, sub_dir)):
        wget_and_unzip(sub_dir, data_dir, cafa2_targets_url)
    sub_dir = cafa2_data_dir = 'CAFA2Supplementary_data'
    if not os.path.exists('%s/%s' % (data_dir, sub_dir)):
        wget_and_unzip(sub_dir, data_dir, cafa2_data_url)
    cafa2_targets_dir = './CAFA2Supplementary_data/data/CAFA2-targets'
    cafa2_benchmark_dir = './CAFA2Supplementary_data/data/benchmark'


if __name__ == "__main__":
    pass
|
[
"os.remove",
"numpy.invert",
"numpy.zeros",
"os.system",
"os.path.exists",
"wget.download",
"numpy.array",
"os.listdir"
] |
[((12276, 12299), 'os.listdir', 'os.listdir', (['targets_dir'], {}), '(targets_dir)\n', (12286, 12299), False, 'import os\n'), ((12595, 12618), 'os.listdir', 'os.listdir', (['mapping_dir'], {}), '(mapping_dir)\n', (12605, 12618), False, 'import os\n'), ((13440, 13471), 'wget.download', 'wget.download', (['url'], {'out': 'rel_dir'}), '(url, out=rel_dir)\n', (13453, 13471), False, 'import wget\n'), ((13502, 13518), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (13511, 13518), False, 'import os\n'), ((5472, 5529), 'numpy.array', 'np.array', (['[self._w2v[aa] for aa in seq]'], {'dtype': 'np.float64'}), '([self._w2v[aa] for aa in seq], dtype=np.float64)\n', (5480, 5529), True, 'import numpy as np\n'), ((7951, 7972), 'numpy.invert', 'np.invert', (['train_indx'], {}), '(train_indx)\n', (7960, 7972), True, 'import numpy as np\n'), ((9308, 9324), 'numpy.zeros', 'np.zeros', (['(B, N)'], {}), '((B, N))\n', (9316, 9324), True, 'import numpy as np\n'), ((9345, 9367), 'numpy.zeros', 'np.zeros', (['(B, 1, D, T)'], {}), '((B, 1, D, T))\n', (9353, 9367), True, 'import numpy as np\n'), ((13057, 13097), 'os.system', 'os.system', (["('unzip %s -d %s' % (src, trg))"], {}), "('unzip %s -d %s' % (src, trg))\n", (13066, 13097), False, 'import os\n'), ((14363, 14394), 'os.path.exists', 'os.path.exists', (['cafa3_train_dir'], {}), '(cafa3_train_dir)\n', (14377, 14394), False, 'import os\n'), ((16555, 16600), 'os.path.exists', 'os.path.exists', (["('%s/%s' % (data_dir, sub_dir))"], {}), "('%s/%s' % (data_dir, sub_dir))\n", (16569, 16600), False, 'import os\n'), ((16731, 16776), 'os.path.exists', 'os.path.exists', (["('%s/%s' % (data_dir, sub_dir))"], {}), "('%s/%s' % (data_dir, sub_dir))\n", (16745, 16776), False, 'import os\n'), ((8732, 8768), 'numpy.array', 'np.array', (['[onto[go] for go in r.lbl]'], {}), '([onto[go] for go in r.lbl])\n', (8740, 8768), True, 'import numpy as np\n'), ((13160, 13204), 'os.system', 'os.system', (["('tar -xvzf %s -C %s' % (src, trg))"], {}), "('tar -xvzf %s -C %s' % (src, trg))\n", (13169, 13204), False, 'import os\n'), ((13253, 13293), 'os.system', 'os.system', (["('unzip %s -d %s' % (src, trg))"], {}), "('unzip %s -d %s' % (src, trg))\n", (13262, 13293), False, 'import os\n'), ((15096, 15129), 'os.path.exists', 'os.path.exists', (['cafa3_targets_dir'], {}), '(cafa3_targets_dir)\n', (15110, 15129), False, 'import os\n'), ((15137, 15170), 'os.path.exists', 'os.path.exists', (['cafa3_mapping_dir'], {}), '(cafa3_mapping_dir)\n', (15151, 15170), False, 'import os\n')]
|
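A minimal usage sketch of the streaming-FASTA pattern from the sample above. It assumes Biopython is installed and that 'proteins.fasta' is a hypothetical input file; the sample's parse_fasta helper is presumed (not confirmed) to wrap Bio.SeqIO.parse.

# Hedged sketch: count records by '>' separators, then stream-parse the FASTA file.
# Assumes Biopython (Bio.SeqIO) and a hypothetical file 'proteins.fasta'.
from Bio import SeqIO


def blocks(handle, size=8192 * 1024):
    # Yield the file in fixed-size chunks so counting stays memory-bounded.
    while True:
        buffer = handle.read(size)
        if not buffer:
            break
        yield buffer


def count_records(fpath, sep=b'>'):
    with open(fpath, 'rb') as f:
        return sum(bl.count(sep) for bl in blocks(f))


if __name__ == "__main__":
    fasta_path = 'proteins.fasta'  # hypothetical path
    num_seq = count_records(fasta_path)
    seq_id2seq = {rec.id: str(rec.seq) for rec in SeqIO.parse(fasta_path, 'fasta')}
    print("parsed %d of %d sequences" % (len(seq_id2seq), num_seq))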
# -*- coding: utf-8 -*-
# linear_regression/test_polynomial.py
"""
Created on Thu Jan 25 19:24:00 2018
@author: jercas
"""
import regression
from matplotlib import cm
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import numpy as np

if __name__ == "__main__":
    X, y = regression.loadDataSet('./temperature.txt')

    m, n = X.shape
    X = np.concatenate((np.ones((m, 1)), X), axis=1)

    rate = 0.0001
    maxLoop = 1000
    epsilon = 0.01

    result, timeConsumed = regression.bgd(X, y, rate, maxLoop, epsilon)
    theta, errors, thetas = result

    # Plot the fitted line
    fittingFig = plt.figure()
    title = 'bgd: rate=%.3f, maxLoop=%d, epsilon=%.3f \n time: %ds' % (rate, maxLoop, epsilon, timeConsumed)
    ax = fittingFig.add_subplot(111, title=title)
    trainingSet = ax.scatter(X[:, 1].flatten().A[0], y[:, 0].flatten().A[0])

    xCopy = X.copy()
    xCopy.sort(0)
    yHat = xCopy * theta
    fittingLine, = ax.plot(xCopy[:, 1], yHat, color='g')

    ax.set_xlabel('temperature')
    ax.set_ylabel('yield')

    plt.legend([trainingSet, fittingLine], ['Training Set', 'Linear Regression'])
    plt.show()

    # Plot the error (cost) curve
    errorsFig = plt.figure()
    ax = errorsFig.add_subplot(111)
    ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%.4f'))
    ax.plot(range(len(errors)), errors)
    ax.set_xlabel('Number of iterations')
    ax.set_ylabel('Cost J')
    plt.show()

    # -----------------------------------------------------------------------------------------------------------------
    X, y = regression.loadDataSet('./temperature.txt')
    m, n = X.shape
    srcX = np.concatenate((X[:, 0], np.power(X[:, 0], 2)), axis=1)

    # Feature scaling
    srcX = regression.standardize(srcX.copy())
    srcX = np.concatenate((np.ones((m, 1)), srcX), axis=1)

    rate = 0.1
    maxLoop = 1000
    epsilon = 0.01

    result, timeConsumed = regression.bgd(srcX, y, rate, maxLoop, epsilon)
    theta, errors, thetas = result

    # Plot the training points
    fittingFig = plt.figure()
    title = 'polynomial with bgd: rate=%.2f, maxLoop=%d, epsilon=%.3f \n time: %ds' % (rate, maxLoop, epsilon, timeConsumed)
    ax = fittingFig.add_subplot(111, title=title)
    trainingSet = ax.scatter(X[:, 0].flatten().A[0], y[:, 0].flatten().A[0])

    # Plot the fitted polynomial curve
    xx = np.linspace(50, 100, 50)
    xx2 = np.power(xx, 2)
    yHat = []
    for i in range(50):
        normalizedSize = (xx[i] - xx.mean()) / xx.std(0)
        normalizedSize2 = (xx2[i] - xx2.mean()) / xx2.std(0)
        xHat = np.matrix([[1, normalizedSize, normalizedSize2]])
        yHat.append(regression.hypothesis(theta, xHat.T))
    fittingLine, = ax.plot(xx, yHat, color='g')

    ax.set_xlabel('temperature')
    ax.set_ylabel('yield')

    plt.legend([trainingSet, fittingLine], ['Training Set', 'Polynomial Regression'])
    plt.show()

    # Plot the error (cost) curve
    errorsFig = plt.figure()
    ax = errorsFig.add_subplot(111)
    ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%.2e'))
    ax.plot(range(len(errors)), errors)
    ax.set_xlabel('Number of iterations')
    ax.set_ylabel('Cost J')
    plt.show()
|
[
"numpy.matrix",
"matplotlib.pyplot.show",
"numpy.power",
"matplotlib.pyplot.legend",
"numpy.ones",
"regression.bgd",
"matplotlib.pyplot.figure",
"regression.loadDataSet",
"matplotlib.ticker.FormatStrFormatter",
"numpy.linspace",
"regression.hypothesis"
] |
[((330, 373), 'regression.loadDataSet', 'regression.loadDataSet', (['"""./temperature.txt"""'], {}), "('./temperature.txt')\n", (352, 373), False, 'import regression\n'), ((511, 555), 'regression.bgd', 'regression.bgd', (['X', 'y', 'rate', 'maxLoop', 'epsilon'], {}), '(X, y, rate, maxLoop, epsilon)\n', (525, 555), False, 'import regression\n'), ((614, 626), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (624, 626), True, 'import matplotlib.pyplot as plt\n'), ((1012, 1089), 'matplotlib.pyplot.legend', 'plt.legend', (['[trainingSet, fittingLine]', "['Training Set', 'Linear Regression']"], {}), "([trainingSet, fittingLine], ['Training Set', 'Linear Regression'])\n", (1022, 1089), True, 'import matplotlib.pyplot as plt\n'), ((1091, 1101), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1099, 1101), True, 'import matplotlib.pyplot as plt\n'), ((1126, 1138), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1136, 1138), True, 'import matplotlib.pyplot as plt\n'), ((1339, 1349), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1347, 1349), True, 'import matplotlib.pyplot as plt\n'), ((1477, 1520), 'regression.loadDataSet', 'regression.loadDataSet', (['"""./temperature.txt"""'], {}), "('./temperature.txt')\n", (1499, 1520), False, 'import regression\n'), ((1780, 1827), 'regression.bgd', 'regression.bgd', (['srcX', 'y', 'rate', 'maxLoop', 'epsilon'], {}), '(srcX, y, rate, maxLoop, epsilon)\n', (1794, 1827), False, 'import regression\n'), ((1884, 1896), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1894, 1896), True, 'import matplotlib.pyplot as plt\n'), ((2157, 2181), 'numpy.linspace', 'np.linspace', (['(50)', '(100)', '(50)'], {}), '(50, 100, 50)\n', (2168, 2181), True, 'import numpy as np\n'), ((2189, 2204), 'numpy.power', 'np.power', (['xx', '(2)'], {}), '(xx, 2)\n', (2197, 2204), True, 'import numpy as np\n'), ((2555, 2640), 'matplotlib.pyplot.legend', 'plt.legend', (['[trainingSet, fittingLine]', "['Training Set', 'Polynomial Regression']"], {}), "([trainingSet, fittingLine], ['Training Set',\n 'Polynomial Regression'])\n", (2565, 2640), True, 'import matplotlib.pyplot as plt\n'), ((2639, 2649), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2647, 2649), True, 'import matplotlib.pyplot as plt\n'), ((2674, 2686), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2684, 2686), True, 'import matplotlib.pyplot as plt\n'), ((2887, 2897), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2895, 2897), True, 'import matplotlib.pyplot as plt\n'), ((1202, 1234), 'matplotlib.ticker.FormatStrFormatter', 'mtick.FormatStrFormatter', (['"""%.4f"""'], {}), "('%.4f')\n", (1226, 1234), True, 'import matplotlib.ticker as mtick\n'), ((2352, 2401), 'numpy.matrix', 'np.matrix', (['[[1, normalizedSize, normalizedSize2]]'], {}), '([[1, normalizedSize, normalizedSize2]])\n', (2361, 2401), True, 'import numpy as np\n'), ((2750, 2782), 'matplotlib.ticker.FormatStrFormatter', 'mtick.FormatStrFormatter', (['"""%.2e"""'], {}), "('%.2e')\n", (2774, 2782), True, 'import matplotlib.ticker as mtick\n'), ((411, 426), 'numpy.ones', 'np.ones', (['(m, 1)'], {}), '((m, 1))\n', (418, 426), True, 'import numpy as np\n'), ((1571, 1591), 'numpy.power', 'np.power', (['X[:, 0]', '(2)'], {}), '(X[:, 0], 2)\n', (1579, 1591), True, 'import numpy as np\n'), ((1678, 1693), 'numpy.ones', 'np.ones', (['(m, 1)'], {}), '((m, 1))\n', (1685, 1693), True, 'import numpy as np\n'), ((2416, 2452), 'regression.hypothesis', 'regression.hypothesis', (['theta', 'xHat.T'], 
{}), '(theta, xHat.T)\n', (2437, 2452), False, 'import regression\n')]
|
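A hedged sketch of what regression.bgd and regression.hypothesis presumably compute in the sample above: plain batch gradient descent on the mean-squared-error cost and a linear hypothesis. The 'regression' module itself is not shown in the row, so these function bodies, the synthetic temperature data, and the learning-rate choice are illustrative assumptions only.

# Assumed re-implementation of batch gradient descent for linear regression (NumPy only).
import numpy as np


def hypothesis(theta, x):
    # Linear hypothesis h(x) = theta^T x for a single column-vector sample x.
    return float(theta.T @ x)


def bgd(X, y, rate=0.01, max_loop=1000, epsilon=1e-4):
    m, n = X.shape
    theta = np.zeros((n, 1))
    errors = []
    for _ in range(max_loop):
        residual = X @ theta - y                      # shape (m, 1)
        cost = float(residual.T @ residual) / (2 * m)  # mean-squared-error cost J
        errors.append(cost)
        theta = theta - rate * (X.T @ residual) / m   # gradient step
        if len(errors) > 1 and abs(errors[-2] - errors[-1]) < epsilon:
            break
    return theta, errors


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    temp = rng.uniform(50, 100, size=(50, 1))               # synthetic temperatures
    X = np.hstack([np.ones((50, 1)), temp])               # bias column + feature
    y = 3.0 + 0.5 * temp + rng.normal(0, 1, size=(50, 1))   # synthetic yields
    theta, errors = bgd(X, y, rate=1e-4, max_loop=5000)
    print(theta.ravel(), errors[-1])
    print(hypothesis(theta, np.array([[1.0], [75.0]])))   # predict at 75 degrees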
from dataclasses import dataclass
from logging import Logger, getLogger
from typing import Dict, Union

import numpy as np
import pandas as pd
from omegaconf import MISSING

from fseval.types import IncompatibilityError, TerminalColor

from .._experiment import Experiment
from ._config import RankAndValidatePipeline


@dataclass
class RankingValidator(Experiment, RankAndValidatePipeline):
    """Validates a feature ranking. A feature ranking is validated by comparing the
    estimated feature ranking, importance or support to the ground-truth feature
    importances. Generally, the ground-truth feature importances are only available
    when a dataset is synthetically generated."""

    bootstrap_state: int = MISSING

    logger: Logger = getLogger(__name__)

    def __post_init__(self):
        if not (
            self.ranker.estimates_feature_importances
            or self.ranker.estimates_feature_ranking
            or self.ranker.estimates_feature_support
        ):
            raise IncompatibilityError(
                f"{self.ranker.name} performs no form of feature ranking: "
                + "this estimator cannot be used as a ranker."
            )

        super(RankingValidator, self).__post_init__()

    @property
    def _cache_filename(self):
        override = f"bootstrap_state={self.bootstrap_state}"
        filename = f"ranking[{override}].pickle"

        return filename

    def _get_estimator(self):
        yield self.ranker

    def prefit(self):
        self.ranker._load_cache(self._cache_filename, self.storage)

    def fit(self, X, y):
        self.logger.info(f"fitting ranker: " + TerminalColor.yellow(self.ranker.name))
        super(RankingValidator, self).fit(X, y)

    def postfit(self):
        self.ranker._save_cache(self._cache_filename, self.storage)

    def score(self, X, y, **kwargs) -> Union[Dict, pd.DataFrame, np.generic, None]:
        """Scores a feature ranker, if a ground truth on the desired dataset
        feature importances is available. If this is the case, the estimated normalized
        feature importances are compared to the desired ones using two metrics:
        log loss and the R^2 score. Whilst the log loss converts the ground-truth
        desired feature rankings to a binary value, 0/1, the R^2 score always works."""

        # ensure ground truth feature_importances are 1-dimensional
        feature_importances = kwargs.pop("feature_importances", None)
        if feature_importances is not None:
            assert (
                np.ndim(feature_importances) == 1
            ), "instance-based not supported yet."

        # add fitting time and bootstrap to score
        scores_dict = {
            "fit_time": self.ranker.fit_time_,
            "bootstrap_state": self.bootstrap_state,
        }

        # create dataframe
        scores = pd.DataFrame([scores_dict])

        # add custom metrics
        for metric_name, metric_class in self.metrics.items():
            scores_metric = metric_class.score_ranking(
                scores,
                self.ranker,
                self.bootstrap_state,
                self.callbacks,
                feature_importances,
            )

            if scores_metric is not None:
                scores = scores_metric

        return scores
|
[
"pandas.DataFrame",
"fseval.types.TerminalColor.yellow",
"numpy.ndim",
"fseval.types.IncompatibilityError",
"logging.getLogger"
] |
[((750, 769), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (759, 769), False, 'from logging import Logger, getLogger\n'), ((2854, 2881), 'pandas.DataFrame', 'pd.DataFrame', (['[scores_dict]'], {}), '([scores_dict])\n', (2866, 2881), True, 'import pandas as pd\n'), ((1006, 1143), 'fseval.types.IncompatibilityError', 'IncompatibilityError', (["(f'{self.ranker.name} performs no form of feature ranking: ' +\n 'this estimator cannot be used as a ranker.')"], {}), "(\n f'{self.ranker.name} performs no form of feature ranking: ' +\n 'this estimator cannot be used as a ranker.')\n", (1026, 1143), False, 'from fseval.types import IncompatibilityError, TerminalColor\n'), ((1638, 1676), 'fseval.types.TerminalColor.yellow', 'TerminalColor.yellow', (['self.ranker.name'], {}), '(self.ranker.name)\n', (1658, 1676), False, 'from fseval.types import IncompatibilityError, TerminalColor\n'), ((2539, 2567), 'numpy.ndim', 'np.ndim', (['feature_importances'], {}), '(feature_importances)\n', (2546, 2567), True, 'import numpy as np\n')]
|
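A hedged illustration of the scoring idea described in the RankingValidator.score docstring above: compare estimated normalized feature importances against a ground truth with the R^2 score, and against a binarized ground truth with log loss. The arrays are made-up values and the metric functions come from scikit-learn; this is not fseval's actual metric implementation.

# Illustrative-only comparison of estimated vs. ground-truth feature importances.
import numpy as np
from sklearn.metrics import log_loss, r2_score

ground_truth = np.array([0.0, 0.0, 0.7, 0.3])    # synthetic-dataset "true" importances
estimated = np.array([0.05, 0.10, 0.60, 0.25])  # ranker's normalized importance estimates

r2 = r2_score(ground_truth, estimated)               # works for any real-valued truth
binary_truth = (ground_truth > 0).astype(int)        # 0/1 relevance, as the docstring notes
ll = log_loss(binary_truth, estimated)               # treats estimates as probabilities
print(f"R^2={r2:.3f}, log-loss={ll:.3f}")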